1 // Copyright 2018 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 #include "VkDecoderGlobalState.h"
15 
16 #include <algorithm>
17 #include <functional>
18 #include <list>
19 #include <memory>
20 #include <mutex>
21 #include <unordered_map>
22 #include <vector>
23 
24 #include "BlobManager.h"
25 #include "FrameBuffer.h"
26 #include "RenderThreadInfoVk.h"
27 #include "VkAndroidNativeBuffer.h"
28 #include "VkCommonOperations.h"
29 #include "VkDecoderContext.h"
30 #include "VkDecoderSnapshot.h"
31 #include "VulkanDispatch.h"
32 #include "VulkanStream.h"
33 #include "aemu/base/ManagedDescriptor.hpp"
34 #include "aemu/base/Optional.h"
35 #include "aemu/base/Tracing.h"
36 #include "aemu/base/containers/EntityManager.h"
37 #include "aemu/base/containers/HybridEntityManager.h"
38 #include "aemu/base/containers/Lookup.h"
39 #include "aemu/base/files/Stream.h"
40 #include "aemu/base/memory/SharedMemory.h"
41 #include "aemu/base/synchronization/ConditionVariable.h"
42 #include "aemu/base/synchronization/Lock.h"
43 #include "aemu/base/system/System.h"
44 #include "common/goldfish_vk_deepcopy.h"
45 #include "common/goldfish_vk_dispatch.h"
46 #include "common/goldfish_vk_marshaling.h"
47 #include "common/goldfish_vk_reserved_marshaling.h"
48 #include "compressedTextureFormats/AstcCpuDecompressor.h"
49 #include "host-common/GfxstreamFatalError.h"
50 #include "host-common/HostmemIdMapping.h"
51 #include "host-common/address_space_device_control_ops.h"
52 #include "host-common/emugl_vm_operations.h"
53 #include "host-common/feature_control.h"
54 #include "host-common/vm_operations.h"
55 #include "vulkan/VkFormatUtils.h"
56 #include "utils/RenderDoc.h"
57 #include "vk_util.h"
58 #include "vulkan/emulated_textures/AstcTexture.h"
59 #include "vulkan/emulated_textures/CompressedImageInfo.h"
60 #include "vulkan/emulated_textures/GpuDecompressionPipeline.h"
61 #include "vulkan/vk_enum_string_helper.h"
62 
63 #ifndef _WIN32
64 #include <unistd.h>
65 #endif
66 
67 #ifdef __APPLE__
68 #include <CoreFoundation/CoreFoundation.h>
69 #endif
70 
71 #include <climits>
72 
73 namespace gfxstream {
74 namespace vk {
75 
76 using android::base::AutoLock;
77 using android::base::ConditionVariable;
78 using android::base::DescriptorType;
79 using android::base::Lock;
80 using android::base::ManagedDescriptor;
81 using android::base::MetricEventBadPacketLength;
82 using android::base::MetricEventDuplicateSequenceNum;
83 using android::base::MetricEventVulkanOutOfMemory;
84 using android::base::Optional;
85 using android::base::SharedMemory;
86 using android::base::StaticLock;
87 using android::emulation::ManagedDescriptorInfo;
88 using emugl::ABORT_REASON_OTHER;
89 using emugl::FatalError;
90 using emugl::GfxApiLogger;
91 using gfxstream::BlobManager;
92 using gfxstream::VulkanInfo;
93 
94 // TODO(b/261477138): Move to a shared aemu definition
95 #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))
96 #define __ALIGN(x, a) __ALIGN_MASK(x, (__typeof__(x))(a)-1)
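// Illustrative note (not from the original source): __ALIGN rounds x up to the
// next multiple of a power-of-two alignment a by adding (a - 1) and masking off
// the low bits, e.g. __ALIGN(5, 4) == 8 and __ALIGN(8, 4) == 8.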
97 
98 // TODO: Asserts build
99 #define DCHECK(condition) (void)(condition);
100 
101 #define VKDGS_DEBUG 0
102 
103 #if VKDGS_DEBUG
104 #define VKDGS_LOG(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
105 #else
106 #define VKDGS_LOG(fmt, ...)
107 #endif
108 
109 // Blob mem
110 #define STREAM_BLOB_MEM_GUEST 1
111 #define STREAM_BLOB_MEM_HOST3D 2
112 #define STREAM_BLOB_MEM_HOST3D_GUEST 3
113 
114 // Blob flags
115 #define STREAM_BLOB_FLAG_USE_MAPPABLE 1
116 #define STREAM_BLOB_FLAG_USE_SHAREABLE 2
117 #define STREAM_BLOB_FLAG_USE_CROSS_DEVICE 4
118 #define STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE 8
119 
120 #define VALIDATE_REQUIRED_HANDLE(parameter) \
121     validateRequiredHandle(__FUNCTION__, #parameter, parameter)
122 
123 template <typename T>
124 void validateRequiredHandle(const char* api_name, const char* parameter_name, T value) {
125     if (value == VK_NULL_HANDLE) {
126         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << api_name << ":" << parameter_name;
127     }
128 }
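// Illustrative usage (hypothetical parameter name): a decoder entry point can
// write VALIDATE_REQUIRED_HANDLE(pCreateInfo->renderPass) to abort with the API
// and parameter names when the guest passes VK_NULL_HANDLE where a valid handle
// is required.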
129 
130 #if defined(_WIN32)
131 // External sync objects are HANDLE on Windows
132 typedef HANDLE VK_EXT_SYNC_HANDLE;
133 // corresponds to INVALID_HANDLE_VALUE
134 #define VK_EXT_SYNC_HANDLE_INVALID (VK_EXT_SYNC_HANDLE)(uintptr_t)(-1)
135 #else
136 // External sync objects are fd's on other POSIX systems
137 typedef int VK_EXT_SYNC_HANDLE;
138 #define VK_EXT_SYNC_HANDLE_INVALID (-1)
139 #endif
140 
141 VK_EXT_SYNC_HANDLE dupExternalSync(VK_EXT_SYNC_HANDLE h) {
142 #ifdef _WIN32
143     auto myProcessHandle = GetCurrentProcess();
144     VK_EXT_SYNC_HANDLE res;
145     DuplicateHandle(myProcessHandle, h,     // source process and handle
146                     myProcessHandle, &res,  // target process and pointer to handle
147                     0 /* desired access (ignored) */, true /* inherit */,
148                     DUPLICATE_SAME_ACCESS /* same access option */);
149     return res;
150 #else
151     return dup(h);
152 #endif
153 }
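// Note (added for clarity): the duplicate returned above is owned by the
// caller, which is expected to release it (CloseHandle on Windows, close() on
// POSIX) when it is no longer needed.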
154 
155 // A list of device extensions that should not be passed to the host driver.
156 // These will mainly include Vulkan features that we emulate ourselves.
157 static constexpr const char* const kEmulatedDeviceExtensions[] = {
158     "VK_ANDROID_external_memory_android_hardware_buffer",
159     "VK_ANDROID_native_buffer",
160     "VK_FUCHSIA_buffer_collection",
161     "VK_FUCHSIA_external_memory",
162     "VK_FUCHSIA_external_semaphore",
163     VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME,
164     VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
165     VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
166     VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
167     VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
168     VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME,
169     VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
170 #if defined(__QNX__)
171     VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
172     VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
173 #endif
174 };
175 
176 // A list of instance extensions that should not be passed to the host driver.
177 // On older pre-1.1 Vulkan platforms, gfxstream emulates these features.
178 static constexpr const char* const kEmulatedInstanceExtensions[] = {
179     VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
180     VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
181     VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
182 };
183 
184 static constexpr uint32_t kMaxSafeVersion = VK_MAKE_VERSION(1, 3, 0);
185 static constexpr uint32_t kMinVersion = VK_MAKE_VERSION(1, 0, 0);
186 
187 static constexpr uint64_t kPageSizeforBlob = 4096;
188 static constexpr uint64_t kPageMaskForBlob = ~(0xfff);
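// Illustrative note: with these constants, (offset & kPageMaskForBlob) rounds an
// address down to a 4096-byte page boundary, e.g. 0x1234 & kPageMaskForBlob == 0x1000.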
189 
190 static uint64_t hostBlobId = 0;
191 
192 #define DEFINE_BOXED_HANDLE_TYPE_TAG(type) Tag_##type,
193 
194 enum BoxedHandleTypeTag {
195     Tag_Invalid = 0,
196     GOLDFISH_VK_LIST_HANDLE_TYPES_BY_STAGE(DEFINE_BOXED_HANDLE_TYPE_TAG)
197 };
198 
199 template <class T>
200 class BoxedHandleManager {
201    public:
202     // The hybrid entity manager uses a sequence lock to protect access to
203     // a working set of 16000 handles, allowing us to avoid using a regular
204     // lock for those. Performance is degraded when going over this number,
205     // as it will then fall back to a std::map.
206     //
207     // We use 16000 as the max number of live handles to track; we don't
208     // expect the system to go over 16000 total live handles, outside some
209     // dEQP object management tests.
210     using Store = android::base::HybridEntityManager<16000, uint64_t, T>;
211 
212     Lock lock;
213     mutable Store store;
214     std::unordered_map<uint64_t, uint64_t> reverseMap;
215     struct DelayedRemove {
216         uint64_t handle;
217         std::function<void()> callback;
218     };
219     std::unordered_map<VkDevice, std::vector<DelayedRemove>> delayedRemoves;
220 
221     void clear() {
222         reverseMap.clear();
223         store.clear();
224     }
225 
226     uint64_t add(const T& item, BoxedHandleTypeTag tag) {
227         auto res = (uint64_t)store.add(item, (size_t)tag);
228         AutoLock l(lock);
229         reverseMap[(uint64_t)(item.underlying)] = res;
230         return res;
231     }
232 
233     uint64_t addFixed(uint64_t handle, const T& item, BoxedHandleTypeTag tag) {
234         auto res = (uint64_t)store.addFixed(handle, item, (size_t)tag);
235         AutoLock l(lock);
236         reverseMap[(uint64_t)(item.underlying)] = res;
237         return res;
238     }
239 
240     void remove(uint64_t h) {
241         auto item = get(h);
242         if (item) {
243             AutoLock l(lock);
244             reverseMap.erase((uint64_t)(item->underlying));
245         }
246         store.remove(h);
247     }
248 
249     void removeDelayed(uint64_t h, VkDevice device, std::function<void()> callback) {
250         AutoLock l(lock);
251         delayedRemoves[device].push_back({h, callback});
252     }
253 
254     void processDelayedRemovesGlobalStateLocked(VkDevice device) {
255         AutoLock l(lock);
256         auto it = delayedRemoves.find(device);
257         if (it == delayedRemoves.end()) return;
258         auto& delayedRemovesList = it->second;
259         for (const auto& r : delayedRemovesList) {
260             auto h = r.handle;
261             // VkDecoderGlobalState is already locked when callback is called.
262             auto funcGlobalStateLocked = r.callback;
263             funcGlobalStateLocked();
264             store.remove(h);
265         }
266         delayedRemovesList.clear();
267         delayedRemoves.erase(it);
268     }
269 
270     T* get(uint64_t h) { return (T*)store.get_const(h); }
271 
272     uint64_t getBoxedFromUnboxedLocked(uint64_t unboxed) {
273         auto* res = android::base::find(reverseMap, unboxed);
274         if (!res) return 0;
275         return *res;
276     }
277 };
278 
279 struct OrderMaintenanceInfo {
280     uint32_t sequenceNumber = 0;
281     Lock lock;
282     ConditionVariable cv;
283 
284     uint32_t refcount = 1;
285 
286     void incRef() { __atomic_add_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
287 
288     bool decRef() { return 0 == __atomic_sub_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
289 };
290 
291 static void acquireOrderMaintInfo(OrderMaintenanceInfo* ord) {
292     if (!ord) return;
293     ord->incRef();
294 }
295 
296 static void releaseOrderMaintInfo(OrderMaintenanceInfo* ord) {
297     if (!ord) return;
298     if (ord->decRef()) delete ord;
299 }
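// Sketch of the intended acquire/release pattern (illustrative, assuming a
// caller that waits on the condition variable):
//   acquireOrderMaintInfo(ord);   // ++refcount while the waiter holds a reference
//   ...wait on ord->cv with ord->lock held until sequenceNumber advances...
//   releaseOrderMaintInfo(ord);   // frees ord once the count drops to zero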
300 
301 template <class T>
302 class DispatchableHandleInfo {
303    public:
304     T underlying;
305     VulkanDispatch* dispatch = nullptr;
306     bool ownDispatch = false;
307     OrderMaintenanceInfo* ordMaintInfo = nullptr;
308     VulkanMemReadingStream* readStream = nullptr;
309 };
310 
311 static BoxedHandleManager<DispatchableHandleInfo<uint64_t>> sBoxedHandleManager;
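// Illustrative boxed-handle flow (hypothetical tag and handle values):
//   DispatchableHandleInfo<uint64_t> info;
//   info.underlying = rawHandle;                      // host Vulkan handle
//   uint64_t boxed = sBoxedHandleManager.add(info, Tag_VkInstance);
//   auto* stored = sBoxedHandleManager.get(boxed);    // look up by boxed value
//   sBoxedHandleManager.remove(boxed);                // also drops the reverse mapping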
312 
313 struct ReadStreamRegistry {
314     Lock mLock;
315 
316     std::vector<VulkanMemReadingStream*> freeStreams;
317 
318     ReadStreamRegistry() { freeStreams.reserve(100); };
319 
320     VulkanMemReadingStream* pop() {
321         AutoLock lock(mLock);
322         if (freeStreams.empty()) {
323             return new VulkanMemReadingStream(0);
324         } else {
325             VulkanMemReadingStream* res = freeStreams.back();
326             freeStreams.pop_back();
327             return res;
328         }
329     }
330 
331     void push(VulkanMemReadingStream* stream) {
332         AutoLock lock(mLock);
333         freeStreams.push_back(stream);
334     }
335 };
336 
337 static ReadStreamRegistry sReadStreamRegistry;
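// Illustrative pool usage (sketch): a decode path borrows a reading stream and
// returns it afterwards so allocations are reused across calls:
//   VulkanMemReadingStream* stream = sReadStreamRegistry.pop();
//   ...read guest memory through stream...
//   sReadStreamRegistry.push(stream);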
338 
339 class VkDecoderGlobalState::Impl {
340    public:
341     Impl()
342         : m_vk(vkDispatch()),
343           m_emu(getGlobalVkEmulation()),
344           mRenderDocWithMultipleVkInstances(m_emu->guestRenderDoc.get()) {
345         mSnapshotsEnabled = feature_is_enabled(kFeature_VulkanSnapshots);
346         mVkCleanupEnabled =
347             android::base::getEnvironmentVariable("ANDROID_EMU_VK_NO_CLEANUP") != "1";
348         mLogging = android::base::getEnvironmentVariable("ANDROID_EMU_VK_LOG_CALLS") == "1";
349         mVerbosePrints = android::base::getEnvironmentVariable("ANDROID_EMUGL_VERBOSE") == "1";
350         if (get_emugl_address_space_device_control_ops().control_get_hw_funcs &&
351             get_emugl_address_space_device_control_ops().control_get_hw_funcs()) {
352             mUseOldMemoryCleanupPath = 0 == get_emugl_address_space_device_control_ops()
353                                                 .control_get_hw_funcs()
354                                                 ->getPhysAddrStartLocked();
355         }
356         mGuestUsesAngle = feature_is_enabled(kFeature_GuestUsesAngle);
357     }
358 
359     ~Impl() = default;
360 
361     // Resets all internal tracking info.
362     // Assumes that the heavyweight cleanup operations
363     // have already happened.
364     void clear() {
365         mInstanceInfo.clear();
366         mPhysdevInfo.clear();
367         mDeviceInfo.clear();
368         mImageInfo.clear();
369         mImageViewInfo.clear();
370         mSamplerInfo.clear();
371         mCmdBufferInfo.clear();
372         mCmdPoolInfo.clear();
373         mDeviceToPhysicalDevice.clear();
374         mPhysicalDeviceToInstance.clear();
375         mQueueInfo.clear();
376         mBufferInfo.clear();
377         mMemoryInfo.clear();
378         mShaderModuleInfo.clear();
379         mPipelineCacheInfo.clear();
380         mPipelineInfo.clear();
381         mRenderPassInfo.clear();
382         mFramebufferInfo.clear();
383         mSemaphoreInfo.clear();
384         mFenceInfo.clear();
385 #ifdef _WIN32
386         mSemaphoreId = 1;
387         mExternalSemaphoresById.clear();
388 #endif
389         mDescriptorUpdateTemplateInfo.clear();
390 
391         mCreatedHandlesForSnapshotLoad.clear();
392         mCreatedHandlesForSnapshotLoadIndex = 0;
393 
394         sBoxedHandleManager.clear();
395     }
396 
397     bool snapshotsEnabled() const { return mSnapshotsEnabled; }
398 
399     bool vkCleanupEnabled() const { return mVkCleanupEnabled; }
400 
401     void save(android::base::Stream* stream) { snapshot()->save(stream); }
402 
403     void load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
404               HealthMonitor<>* healthMonitor) {
405         // assume that we already destroyed all instances
406         // from FrameBuffer's onLoad method.
407 
408         // destroy all current internal data structures
409         clear();
410 
411         // this part will replay in the decoder
412         snapshot()->load(stream, gfxLogger, healthMonitor);
413     }
414 
415     void lock() { mLock.lock(); }
416 
417     void unlock() { mLock.unlock(); }
418 
419     size_t setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
420         size_t consumed = 0;
421 
422         if (!buffer) return consumed;
423 
424         uint32_t bufferSize = *(uint32_t*)buffer;
425 
426         consumed += 4;
427 
428         uint32_t handleCount = bufferSize / 8;
429         VKDGS_LOG("incoming handle count: %u", handleCount);
430 
431         uint64_t* handles = (uint64_t*)(buffer + 4);
432 
433         mCreatedHandlesForSnapshotLoad.clear();
434         mCreatedHandlesForSnapshotLoadIndex = 0;
435 
436         for (uint32_t i = 0; i < handleCount; ++i) {
437             VKDGS_LOG("handle to load: 0x%llx", (unsigned long long)(uintptr_t)handles[i]);
438             mCreatedHandlesForSnapshotLoad.push_back(handles[i]);
439             consumed += 8;
440         }
441 
442         return consumed;
443     }
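// Buffer layout consumed above: a leading uint32_t byte count, followed by
// (byteCount / 8) uint64_t boxed handle values; the function returns
// 4 + 8 * handleCount bytes consumed.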
444 
445     void clearCreatedHandlesForSnapshotLoad() {
446         mCreatedHandlesForSnapshotLoad.clear();
447         mCreatedHandlesForSnapshotLoadIndex = 0;
448     }
449 
450     VkResult on_vkEnumerateInstanceVersion(android::base::BumpPool* pool, uint32_t* pApiVersion) {
451         if (m_vk->vkEnumerateInstanceVersion) {
452             VkResult res = m_vk->vkEnumerateInstanceVersion(pApiVersion);
453 
454             if (*pApiVersion > kMaxSafeVersion) {
455                 *pApiVersion = kMaxSafeVersion;
456             }
457 
458             return res;
459         }
460         *pApiVersion = kMinVersion;
461         return VK_SUCCESS;
462     }
463 
464     VkResult on_vkCreateInstance(android::base::BumpPool* pool,
465                                  const VkInstanceCreateInfo* pCreateInfo,
466                                  const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {
467         std::vector<const char*> finalExts = filteredInstanceExtensionNames(
468             pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
469 
470         if (pCreateInfo->pApplicationInfo) {
471             if (pCreateInfo->pApplicationInfo->pApplicationName)
472                 INFO("Creating Vulkan instance for app: %s",
473                      pCreateInfo->pApplicationInfo->pApplicationName);
474             if (pCreateInfo->pApplicationInfo->pEngineName)
475                 INFO("Creating Vulkan instance for engine: %s",
476                      pCreateInfo->pApplicationInfo->pEngineName);
477         }
478 
479         // Create higher version instance whenever it is possible.
480         uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
481         if (pCreateInfo->pApplicationInfo) {
482             apiVersion = pCreateInfo->pApplicationInfo->apiVersion;
483         }
484         if (m_vk->vkEnumerateInstanceVersion) {
485             uint32_t instanceVersion;
486             VkResult result = m_vk->vkEnumerateInstanceVersion(&instanceVersion);
487             if (result == VK_SUCCESS && instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
488                 apiVersion = instanceVersion;
489             }
490         }
491 
492         VkInstanceCreateInfo createInfoFiltered;
493         VkApplicationInfo appInfo = {};
494         deepcopy_VkInstanceCreateInfo(pool, VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, pCreateInfo,
495                                       &createInfoFiltered);
496 
497         createInfoFiltered.enabledExtensionCount = static_cast<uint32_t>(finalExts.size());
498         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
499         if (createInfoFiltered.pApplicationInfo != nullptr) {
500             const_cast<VkApplicationInfo*>(createInfoFiltered.pApplicationInfo)->apiVersion =
501                 apiVersion;
502             appInfo = *createInfoFiltered.pApplicationInfo;
503         }
504 
505         // remove VkDebugReportCallbackCreateInfoEXT and
506         // VkDebugUtilsMessengerCreateInfoEXT from the chain.
507         auto* curr = reinterpret_cast<vk_struct_common*>(&createInfoFiltered);
508         while (curr != nullptr) {
509             if (curr->pNext != nullptr &&
510                 (curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT ||
511                  curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT)) {
512                 curr->pNext = curr->pNext->pNext;
513             }
514             curr = curr->pNext;
515         }
516 
517         // bug: 155795731
518         bool swiftshader =
519             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
520              0);
521         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
522 
523         if (swiftshader) {
524             if (mLogging) {
525                 fprintf(stderr, "%s: acquire lock\n", __func__);
526             }
527             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
528         }
529 
530         VkResult res = m_vk->vkCreateInstance(&createInfoFiltered, pAllocator, pInstance);
531 
532         if (res != VK_SUCCESS) {
533             return res;
534         }
535 
536         if (!swiftshader) {
537             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
538         }
539 
540         // TODO: bug 129484301
541         get_emugl_vm_operations().setSkipSnapshotSave(
542             !feature_is_enabled(kFeature_VulkanSnapshots));
543 
544         InstanceInfo info;
545         info.apiVersion = apiVersion;
546         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
547             info.enabledExtensionNames.push_back(createInfoFiltered.ppEnabledExtensionNames[i]);
548         }
549 
550         // Box it up
551         VkInstance boxed = new_boxed_VkInstance(*pInstance, nullptr, true /* own dispatch */);
552         init_vulkan_dispatch_from_instance(m_vk, *pInstance, dispatch_VkInstance(boxed));
553         info.boxed = boxed;
554 
555 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
556         if (m_emu->instanceSupportsMoltenVK) {
557             if (!m_vk->vkSetMTLTextureMVK) {
558                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find vkSetMTLTextureMVK";
559             }
560         }
561 #endif
562 
563         std::string_view engineName = appInfo.pEngineName ? appInfo.pEngineName : "";
564         info.isAngle = (engineName == "ANGLE");
565 
566         mInstanceInfo[*pInstance] = info;
567 
568         *pInstance = (VkInstance)info.boxed;
569 
570         auto fb = FrameBuffer::getFB();
571         if (!fb) return res;
572 
573         if (vkCleanupEnabled()) {
574             fb->registerProcessCleanupCallback(unbox_VkInstance(boxed), [this, boxed] {
575                 vkDestroyInstanceImpl(unbox_VkInstance(boxed), nullptr);
576             });
577         }
578 
579         return res;
580     }
581 
582     void vkDestroyInstanceImpl(VkInstance instance, const VkAllocationCallbacks* pAllocator) {
583         // Do delayed removes out of the lock, but get the list of devices to destroy inside the
584         // lock.
585         {
586             std::lock_guard<std::recursive_mutex> lock(mLock);
587             std::vector<VkDevice> devicesToDestroy;
588 
589             for (auto it : mDeviceToPhysicalDevice) {
590                 auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
591                 if (!otherInstance) continue;
592                 if (instance == *otherInstance) {
593                     devicesToDestroy.push_back(it.first);
594                 }
595             }
596 
597             for (auto device : devicesToDestroy) {
598                 sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
599             }
600         }
601 
602         std::lock_guard<std::recursive_mutex> lock(mLock);
603 
604         teardownInstanceLocked(instance);
605 
606         if (mRenderDocWithMultipleVkInstances) {
607             mRenderDocWithMultipleVkInstances->removeVkInstance(instance);
608         }
609         m_vk->vkDestroyInstance(instance, pAllocator);
610 
611         auto it = mPhysicalDeviceToInstance.begin();
612 
613         while (it != mPhysicalDeviceToInstance.end()) {
614             if (it->second == instance) {
615                 it = mPhysicalDeviceToInstance.erase(it);
616             } else {
617                 ++it;
618             }
619         }
620 
621         auto* instInfo = android::base::find(mInstanceInfo, instance);
622         delete_VkInstance(instInfo->boxed);
623         mInstanceInfo.erase(instance);
624     }
625 
626     void on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance boxed_instance,
627                               const VkAllocationCallbacks* pAllocator) {
628         auto instance = unbox_VkInstance(boxed_instance);
629 
630         vkDestroyInstanceImpl(instance, pAllocator);
631 
632         auto fb = FrameBuffer::getFB();
633         if (!fb) return;
634 
635         fb->unregisterProcessCleanupCallback(instance);
636     }
637 
638     VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool, VkInstance boxed_instance,
639                                            uint32_t* physicalDeviceCount,
640                                            VkPhysicalDevice* physicalDevices) {
641         auto instance = unbox_VkInstance(boxed_instance);
642         auto vk = dispatch_VkInstance(boxed_instance);
643 
644         uint32_t physicalDevicesSize = 0;
645         if (physicalDeviceCount) {
646             physicalDevicesSize = *physicalDeviceCount;
647         }
648 
649         uint32_t actualPhysicalDeviceCount;
650         auto res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount, nullptr);
651         if (res != VK_SUCCESS) {
652             return res;
653         }
654         std::vector<VkPhysicalDevice> validPhysicalDevices(actualPhysicalDeviceCount);
655         res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount,
656                                              validPhysicalDevices.data());
657         if (res != VK_SUCCESS) return res;
658 
659         std::lock_guard<std::recursive_mutex> lock(mLock);
660 
661         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
662             PFN_vkGetPhysicalDeviceProperties2KHR getPhysdevProps2Func =
663                 vk_util::getVkInstanceProcAddrWithFallback<
664                     vk_util::vk_fn_info::GetPhysicalDeviceProperties2>(
665                     {
666                         vk->vkGetInstanceProcAddr,
667                         m_vk->vkGetInstanceProcAddr,
668                     },
669                     instance);
670 
671             if (getPhysdevProps2Func) {
672                 validPhysicalDevices.erase(
673                     std::remove_if(validPhysicalDevices.begin(), validPhysicalDevices.end(),
674                                    [getPhysdevProps2Func, this](VkPhysicalDevice physicalDevice) {
675                                        // We can get the device UUID.
676                                        VkPhysicalDeviceIDPropertiesKHR idProps = {
677                                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
678                                            nullptr,
679                                        };
680                                        VkPhysicalDeviceProperties2KHR propsWithId = {
681                                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
682                                            &idProps,
683                                        };
684                                        getPhysdevProps2Func(physicalDevice, &propsWithId);
685 
686                                        // Remove those devices whose UUIDs don't match the one
687                                        // in VkCommonOperations.
688                                        return memcmp(m_emu->deviceInfo.idProps.deviceUUID,
689                                                      idProps.deviceUUID, VK_UUID_SIZE) != 0;
690                                    }),
691                     validPhysicalDevices.end());
692             } else {
693                 fprintf(stderr,
694                         "%s: warning: failed to "
695                         "vkGetPhysicalDeviceProperties2KHR\n",
696                         __func__);
697             }
698         } else {
699             // If we don't support ID properties then just advertise only the
700             // first physical device.
701             fprintf(stderr,
702                     "%s: device id properties not supported, using first "
703                     "physical device\n",
704                     __func__);
705         }
706         if (!validPhysicalDevices.empty()) {
707             validPhysicalDevices.erase(std::next(validPhysicalDevices.begin()),
708                                        validPhysicalDevices.end());
709         }
710 
711         if (physicalDeviceCount) {
712             *physicalDeviceCount = validPhysicalDevices.size();
713         }
714 
715         if (physicalDeviceCount && physicalDevices) {
716             // Box them up
717             for (uint32_t i = 0; i < std::min(*physicalDeviceCount, physicalDevicesSize); ++i) {
718                 mPhysicalDeviceToInstance[validPhysicalDevices[i]] = instance;
719 
720                 auto& physdevInfo = mPhysdevInfo[validPhysicalDevices[i]];
721 
722                 physdevInfo.boxed = new_boxed_VkPhysicalDevice(validPhysicalDevices[i], vk,
723                                                                false /* does not own dispatch */);
724 
725                 vk->vkGetPhysicalDeviceProperties(validPhysicalDevices[i], &physdevInfo.props);
726 
727                 if (physdevInfo.props.apiVersion > kMaxSafeVersion) {
728                     physdevInfo.props.apiVersion = kMaxSafeVersion;
729                 }
730 
731                 vk->vkGetPhysicalDeviceMemoryProperties(validPhysicalDevices[i],
732                                                         &physdevInfo.memoryProperties);
733 
734                 uint32_t queueFamilyPropCount = 0;
735 
736                 vk->vkGetPhysicalDeviceQueueFamilyProperties(validPhysicalDevices[i],
737                                                              &queueFamilyPropCount, nullptr);
738 
739                 physdevInfo.queueFamilyProperties.resize((size_t)queueFamilyPropCount);
740 
741                 vk->vkGetPhysicalDeviceQueueFamilyProperties(
742                     validPhysicalDevices[i], &queueFamilyPropCount,
743                     physdevInfo.queueFamilyProperties.data());
744 
745                 physicalDevices[i] = (VkPhysicalDevice)physdevInfo.boxed;
746             }
747             if (physicalDevicesSize < *physicalDeviceCount) {
748                 res = VK_INCOMPLETE;
749             }
750         }
751 
752         return res;
753     }
754 
755     void on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
756                                         VkPhysicalDevice boxed_physicalDevice,
757                                         VkPhysicalDeviceFeatures* pFeatures) {
758         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
759         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
760 
761         vk->vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
762         pFeatures->textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
763         pFeatures->textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
764     }
765 
766     void on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
767                                          VkPhysicalDevice boxed_physicalDevice,
768                                          VkPhysicalDeviceFeatures2* pFeatures) {
769         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
770         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
771 
772         std::lock_guard<std::recursive_mutex> lock(mLock);
773 
774         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
775         if (!physdevInfo) return;
776 
777         auto instance = mPhysicalDeviceToInstance[physicalDevice];
778         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
779         if (!instanceInfo) return;
780 
781         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
782             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
783             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
784         } else if (hasInstanceExtension(instance,
785                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
786             vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
787         } else {
788             // No instance extension, fake it!!!!
789             if (pFeatures->pNext) {
790                 fprintf(stderr,
791                         "%s: Warning: Trying to use extension struct in "
792                         "VkPhysicalDeviceFeatures2 without having enabled "
793                         "the extension!!!!11111\n",
794                         __func__);
795             }
796             *pFeatures = {
797                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
798                 0,
799             };
800             vk->vkGetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
801         }
802 
803         pFeatures->features.textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
804         pFeatures->features.textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
805         VkPhysicalDeviceSamplerYcbcrConversionFeatures* ycbcrFeatures =
806             vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pFeatures);
807         if (ycbcrFeatures != nullptr) {
808             ycbcrFeatures->samplerYcbcrConversion |= m_emu->enableYcbcrEmulation;
809         }
810     }
811 
812     VkResult on_vkGetPhysicalDeviceImageFormatProperties(
813         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice, VkFormat format,
814         VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
815         VkImageFormatProperties* pImageFormatProperties) {
816         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
817         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
818         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
819         if (emulatedTexture) {
820             if (!supportEmulatedCompressedImageFormatProperty(format, type, tiling, usage, flags)) {
821                 memset(pImageFormatProperties, 0, sizeof(VkImageFormatProperties));
822                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
823             }
824             flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
825             flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
826             usage |= VK_IMAGE_USAGE_STORAGE_BIT;
827             format = CompressedImageInfo::getCompressedMipmapsFormat(format);
828         }
829 
830         VkResult res = vk->vkGetPhysicalDeviceImageFormatProperties(
831             physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
832         if (res != VK_SUCCESS) {
833             return res;
834         }
835         if (emulatedTexture) {
836             maskImageFormatPropertiesForEmulatedTextures(pImageFormatProperties);
837         }
838         return res;
839     }
840 
841     VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
842         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
843         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
844         VkImageFormatProperties2* pImageFormatProperties) {
845         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
846         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
847         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
848         VkFormat format = pImageFormatInfo->format;
849         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
850         if (emulatedTexture) {
851             if (!supportEmulatedCompressedImageFormatProperty(
852                     pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
853                     pImageFormatInfo->usage, pImageFormatInfo->flags)) {
854                 memset(&pImageFormatProperties->imageFormatProperties, 0,
855                        sizeof(VkImageFormatProperties));
856                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
857             }
858             imageFormatInfo = *pImageFormatInfo;
859             pImageFormatInfo = &imageFormatInfo;
860             imageFormatInfo.flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
861             imageFormatInfo.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
862             imageFormatInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
863             imageFormatInfo.format = CompressedImageInfo::getCompressedMipmapsFormat(format);
864         }
865         std::lock_guard<std::recursive_mutex> lock(mLock);
866 
867         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
868         if (!physdevInfo) {
869             return VK_ERROR_OUT_OF_HOST_MEMORY;
870         }
871 
872         VkResult res = VK_ERROR_INITIALIZATION_FAILED;
873 
874         auto instance = mPhysicalDeviceToInstance[physicalDevice];
875         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
876         if (!instanceInfo) {
877             return res;
878         }
879 
880         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
881             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
882             res = vk->vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo,
883                                                                 pImageFormatProperties);
884         } else if (hasInstanceExtension(instance,
885                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
886             res = vk->vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo,
887                                                                    pImageFormatProperties);
888         } else {
889             // No instance extension, fake it!!!!
890             if (pImageFormatProperties->pNext) {
891                 fprintf(stderr,
892                         "%s: Warning: Trying to use extension struct in "
893                         "VkPhysicalDeviceFeatures2 without having enabled "
894                         "the extension!!!!11111\n",
895                         __func__);
896             }
897             *pImageFormatProperties = {
898                 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
899                 0,
900             };
901             res = vk->vkGetPhysicalDeviceImageFormatProperties(
902                 physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type,
903                 pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags,
904                 &pImageFormatProperties->imageFormatProperties);
905         }
906         if (res != VK_SUCCESS) {
907             return res;
908         }
909 
910         const VkPhysicalDeviceExternalImageFormatInfo* extImageFormatInfo =
911             vk_find_struct<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo);
912         VkExternalImageFormatProperties* extImageFormatProps =
913             vk_find_struct<VkExternalImageFormatProperties>(pImageFormatProperties);
914 
915         // Only allow dedicated allocations for external images.
916         if (extImageFormatInfo && extImageFormatProps) {
917             extImageFormatProps->externalMemoryProperties.externalMemoryFeatures |=
918                 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT;
919         }
920 
921         if (emulatedTexture) {
922             maskImageFormatPropertiesForEmulatedTextures(
923                 &pImageFormatProperties->imageFormatProperties);
924         }
925 
926         return res;
927     }
928 
929     void on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool* pool,
930                                                 VkPhysicalDevice boxed_physicalDevice,
931                                                 VkFormat format,
932                                                 VkFormatProperties* pFormatProperties) {
933         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
934         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
935         getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
936             [vk](VkPhysicalDevice physicalDevice, VkFormat format,
937                  VkFormatProperties* pFormatProperties) {
938                 vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
939             },
940             vk, physicalDevice, format, pFormatProperties);
941     }
942 
943     void on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool* pool,
944                                                  VkPhysicalDevice boxed_physicalDevice,
945                                                  VkFormat format,
946                                                  VkFormatProperties2* pFormatProperties) {
947         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
948         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
949 
950         std::lock_guard<std::recursive_mutex> lock(mLock);
951 
952         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
953         if (!physdevInfo) return;
954 
955         auto instance = mPhysicalDeviceToInstance[physicalDevice];
956         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
957         if (!instanceInfo) return;
958 
959         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
960             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
961             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
962                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
963                      VkFormatProperties2* pFormatProperties) {
964                     vk->vkGetPhysicalDeviceFormatProperties2(physicalDevice, format,
965                                                              pFormatProperties);
966                 },
967                 vk, physicalDevice, format, pFormatProperties);
968         } else if (hasInstanceExtension(instance,
969                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
970             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
971                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
972                      VkFormatProperties2* pFormatProperties) {
973                     vk->vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format,
974                                                                 pFormatProperties);
975                 },
976                 vk, physicalDevice, format, pFormatProperties);
977         } else {
978             // No instance extension, fake it!!!!
979             if (pFormatProperties->pNext) {
980                 fprintf(stderr,
981                         "%s: Warning: Trying to use extension struct in "
982                         "vkGetPhysicalDeviceFormatProperties2 without having "
983                         "enabled the extension!!!!11111\n",
984                         __func__);
985             }
986             pFormatProperties->sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
987             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
988                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
989                      VkFormatProperties* pFormatProperties) {
990                     vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format,
991                                                             pFormatProperties);
992                 },
993                 vk, physicalDevice, format, &pFormatProperties->formatProperties);
994         }
995     }
996 
997     void on_vkGetPhysicalDeviceProperties(android::base::BumpPool* pool,
998                                           VkPhysicalDevice boxed_physicalDevice,
999                                           VkPhysicalDeviceProperties* pProperties) {
1000         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1001         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1002 
1003         vk->vkGetPhysicalDeviceProperties(physicalDevice, pProperties);
1004 
1005         if (pProperties->apiVersion > kMaxSafeVersion) {
1006             pProperties->apiVersion = kMaxSafeVersion;
1007         }
1008     }
1009 
1010     void on_vkGetPhysicalDeviceProperties2(android::base::BumpPool* pool,
1011                                            VkPhysicalDevice boxed_physicalDevice,
1012                                            VkPhysicalDeviceProperties2* pProperties) {
1013         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1014         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1015 
1016         std::lock_guard<std::recursive_mutex> lock(mLock);
1017 
1018         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1019         if (!physdevInfo) return;
1020 
1021         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1022         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1023         if (!instanceInfo) return;
1024 
1025         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1026             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1027             vk->vkGetPhysicalDeviceProperties2(physicalDevice, pProperties);
1028         } else if (hasInstanceExtension(instance,
1029                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1030             vk->vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
1031         } else {
1032             // No instance extension, fake it!!!!
1033             if (pProperties->pNext) {
1034                 fprintf(stderr,
1035                         "%s: Warning: Trying to use extension struct in "
1036                         "VkPhysicalDeviceProperties2 without having enabled "
1037                         "the extension!!!!11111\n",
1038                         __func__);
1039             }
1040             *pProperties = {
1041                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
1042                 0,
1043             };
1044             vk->vkGetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
1045         }
1046 
1047         if (pProperties->properties.apiVersion > kMaxSafeVersion) {
1048             pProperties->properties.apiVersion = kMaxSafeVersion;
1049         }
1050     }
1051 
1052     void on_vkGetPhysicalDeviceMemoryProperties(
1053         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1054         VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
1055         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1056         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1057 
1058         vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
1059 
1060         // Pick a max heap size that will work around
1061         // drivers that give bad suggestions (such as 0xFFFFFFFFFFFFFFFF for the heap size)
1062         // plus won't break the bank on 32-bit userspace.
1063         static constexpr VkDeviceSize kMaxSafeHeapSize = 2ULL * 1024ULL * 1024ULL * 1024ULL;
1064 
1065         for (uint32_t i = 0; i < pMemoryProperties->memoryTypeCount; ++i) {
1066             uint32_t heapIndex = pMemoryProperties->memoryTypes[i].heapIndex;
1067             auto& heap = pMemoryProperties->memoryHeaps[heapIndex];
1068 
1069             if (heap.size > kMaxSafeHeapSize) {
1070                 heap.size = kMaxSafeHeapSize;
1071             }
1072 
1073             if (!feature_is_enabled(kFeature_GLDirectMem) &&
1074                 !feature_is_enabled(kFeature_VirtioGpuNext)) {
1075                 pMemoryProperties->memoryTypes[i].propertyFlags =
1076                     pMemoryProperties->memoryTypes[i].propertyFlags &
1077                     ~(VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1078             }
1079         }
1080     }
1081 
1082     void on_vkGetPhysicalDeviceMemoryProperties2(
1083         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1084         VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
1085         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1086         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1087 
1088         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1089         if (!physdevInfo) return;
1090 
1091         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1092         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1093         if (!instanceInfo) return;
1094 
1095         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1096             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1097             vk->vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
1098         } else if (hasInstanceExtension(instance,
1099                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1100             vk->vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
1101         } else {
1102             // No instance extension, fake it!!!!
1103             if (pMemoryProperties->pNext) {
1104                 fprintf(stderr,
1105                         "%s: Warning: Trying to use extension struct in "
1106                         "VkPhysicalDeviceMemoryProperties2 without having enabled "
1107                         "the extension!!!!11111\n",
1108                         __func__);
1109             }
1110             *pMemoryProperties = {
1111                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
1112                 0,
1113             };
1114             vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice,
1115                                                     &pMemoryProperties->memoryProperties);
1116         }
1117 
1118         // Pick a max heap size that will work around
1119         // drivers that give bad suggestions (such as 0xFFFFFFFFFFFFFFFF for the heap size)
1120         // plus won't break the bank on 32-bit userspace.
1121         static constexpr VkDeviceSize kMaxSafeHeapSize = 2ULL * 1024ULL * 1024ULL * 1024ULL;
1122 
1123         for (uint32_t i = 0; i < pMemoryProperties->memoryProperties.memoryTypeCount; ++i) {
1124             uint32_t heapIndex = pMemoryProperties->memoryProperties.memoryTypes[i].heapIndex;
1125             auto& heap = pMemoryProperties->memoryProperties.memoryHeaps[heapIndex];
1126 
1127             if (heap.size > kMaxSafeHeapSize) {
1128                 heap.size = kMaxSafeHeapSize;
1129             }
1130 
1131             if (!feature_is_enabled(kFeature_GLDirectMem) &&
1132                 !feature_is_enabled(kFeature_VirtioGpuNext)) {
1133                 pMemoryProperties->memoryProperties.memoryTypes[i].propertyFlags =
1134                     pMemoryProperties->memoryProperties.memoryTypes[i].propertyFlags &
1135                     ~(VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1136             }
1137         }
1138     }
1139 
1140     VkResult on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool* pool,
1141                                                      VkPhysicalDevice boxed_physicalDevice,
1142                                                      const char* pLayerName,
1143                                                      uint32_t* pPropertyCount,
1144                                                      VkExtensionProperties* pProperties) {
1145         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1146         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1147 
1148         bool shouldPassthrough = !m_emu->enableYcbcrEmulation;
1149 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
1150         shouldPassthrough = shouldPassthrough && !m_emu->instanceSupportsMoltenVK;
1151 #endif
1152         if (shouldPassthrough) {
1153             return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
1154                                                             pPropertyCount, pProperties);
1155         }
1156 
1157         // If MoltenVK is supported on host, we need to ensure that we include
1158         // VK_MVK_moltenvk extension in returned properties.
1159         std::vector<VkExtensionProperties> properties;
1160         VkResult result =
1161             enumerateDeviceExtensionProperties(vk, physicalDevice, pLayerName, properties);
1162         if (result != VK_SUCCESS) {
1163             return result;
1164         }
1165 
1166 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
1167         if (m_emu->instanceSupportsMoltenVK &&
1168             !hasDeviceExtension(properties, VK_MVK_MOLTENVK_EXTENSION_NAME)) {
1169             VkExtensionProperties mvk_props;
1170             strncpy(mvk_props.extensionName, VK_MVK_MOLTENVK_EXTENSION_NAME,
1171                     sizeof(mvk_props.extensionName));
1172             mvk_props.specVersion = VK_MVK_MOLTENVK_SPEC_VERSION;
1173             properties.push_back(mvk_props);
1174         }
1175 #endif
1176 
1177         if (m_emu->enableYcbcrEmulation &&
1178             !hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
1179             VkExtensionProperties ycbcr_props;
1180             strncpy(ycbcr_props.extensionName, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
1181                     sizeof(ycbcr_props.extensionName));
1182             ycbcr_props.specVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION;
1183             properties.push_back(ycbcr_props);
1184         }
1185         if (pProperties == nullptr) {
1186             *pPropertyCount = properties.size();
1187         } else {
1188             // Return the number of structures actually written to pProperties.
1189             *pPropertyCount = std::min((uint32_t)properties.size(), *pPropertyCount);
1190             memcpy(pProperties, properties.data(), *pPropertyCount * sizeof(VkExtensionProperties));
1191         }
1192         return *pPropertyCount < properties.size() ? VK_INCOMPLETE : VK_SUCCESS;
1193     }
1194 
1195     VkResult on_vkCreateDevice(android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1196                                const VkDeviceCreateInfo* pCreateInfo,
1197                                const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {
1198         if (mLogging) {
1199             fprintf(stderr, "%s: begin\n", __func__);
1200         }
1201 
1202         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1203         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1204 
1205         std::vector<const char*> finalExts =
1206             filteredDeviceExtensionNames(vk, physicalDevice, pCreateInfo->enabledExtensionCount,
1207                                          pCreateInfo->ppEnabledExtensionNames);
1208 
1209         // Run the underlying API call, filtering extensions.
1210         VkDeviceCreateInfo createInfoFiltered = *pCreateInfo;
1211         // According to the spec, it seems that the application can use compressed texture formats
1212         // without enabling the feature when creating the VkDevice, as long as
1213         // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties report
1214         // support: to query for additional properties, or if the feature is not enabled,
1215         // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties can be
1216         // used to check for supported properties of individual formats as normal.
1217         bool emulateTextureEtc2 = needEmulatedEtc2(physicalDevice, vk);
1218         bool emulateTextureAstc = needEmulatedAstc(physicalDevice, vk);
1219         VkPhysicalDeviceFeatures featuresFiltered;
1220         std::vector<VkPhysicalDeviceFeatures*> featuresToFilter;
1221 
1222         if (pCreateInfo->pEnabledFeatures) {
1223             featuresFiltered = *pCreateInfo->pEnabledFeatures;
1224             createInfoFiltered.pEnabledFeatures = &featuresFiltered;
1225             featuresToFilter.emplace_back(&featuresFiltered);
1226         }
1227 
1228         if (VkPhysicalDeviceFeatures2* features2 =
1229                 vk_find_struct<VkPhysicalDeviceFeatures2>(&createInfoFiltered)) {
1230             featuresToFilter.emplace_back(&features2->features);
1231         }
1232 
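        // When compressed-texture formats are emulated, drop the corresponding
        // native feature bits before passing the requested features to the host
        // driver.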
1233         for (VkPhysicalDeviceFeatures* feature : featuresToFilter) {
1234             if (emulateTextureEtc2) {
1235                 feature->textureCompressionETC2 = VK_FALSE;
1236             }
1237             if (emulateTextureAstc) {
1238                 feature->textureCompressionASTC_LDR = VK_FALSE;
1239             }
1240         }
1241 
1242         if (auto* ycbcrFeatures = vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
1243                 &createInfoFiltered)) {
1244             if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
1245                 ycbcrFeatures->samplerYcbcrConversion = VK_FALSE;
1246             }
1247         }
1248 
1249         if (auto* swapchainMaintenance1Features =
1250                 vk_find_struct<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT>(
1251                     &createInfoFiltered)) {
1252             if (!supportsSwapchainMaintenance1(physicalDevice, vk)) {
1253                 swapchainMaintenance1Features->swapchainMaintenance1 = VK_FALSE;
1254             }
1255         }
1256 
1257         createInfoFiltered.enabledExtensionCount = (uint32_t)finalExts.size();
1258         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
1259 
1260         // bug: 155795731
1261         bool swiftshader =
1262             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
1263              0);
1264 
1265         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
1266 
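        // With the SwiftShader ICD, the global lock is taken before the
        // vkCreateDevice call below; for other ICDs it is only taken after the
        // host call returns (see the !swiftshader branch further down).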
1267         if (swiftshader) {
1268             if (mLogging) {
1269                 fprintf(stderr, "%s: acquire lock\n", __func__);
1270             }
1271             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1272         }
1273 
1274         if (mLogging) {
1275             fprintf(stderr, "%s: got lock, calling host\n", __func__);
1276         }
1277 
1278         VkResult result =
1279             vk->vkCreateDevice(physicalDevice, &createInfoFiltered, pAllocator, pDevice);
1280 
1281         if (mLogging) {
1282             fprintf(stderr, "%s: host returned. result: %d\n", __func__, result);
1283         }
1284 
1285         if (result != VK_SUCCESS) return result;
1286 
1287         if (mLogging) {
1288             fprintf(stderr, "%s: track the new device (begin)\n", __func__);
1289         }
1290 
1291         if (!swiftshader) {
1292             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1293         }
1294 
1295         mDeviceToPhysicalDevice[*pDevice] = physicalDevice;
1296 
1297         // Fill out information about the logical device here.
1298         auto& deviceInfo = mDeviceInfo[*pDevice];
1299         deviceInfo.physicalDevice = physicalDevice;
1300         deviceInfo.emulateTextureEtc2 = emulateTextureEtc2;
1301         deviceInfo.emulateTextureAstc = emulateTextureAstc;
1302         deviceInfo.useAstcCpuDecompression =
1303             m_emu->astcLdrEmulationMode == AstcEmulationMode::Cpu &&
1304             AstcCpuDecompressor::get().available();
1305         deviceInfo.decompPipelines =
1306             std::make_unique<GpuDecompressionPipelineManager>(m_vk, *pDevice);
1307         INFO("Created new VkDevice. ASTC emulation? %d. CPU decoding? %d",
1308              deviceInfo.emulateTextureAstc, deviceInfo.useAstcCpuDecompression);
1309 
1310         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
1311             deviceInfo.enabledExtensionNames.push_back(
1312                 createInfoFiltered.ppEnabledExtensionNames[i]);
1313         }
1314 
1315         // First, get the dispatch table.
1316         VkDevice boxed = new_boxed_VkDevice(*pDevice, nullptr, true /* own dispatch */);
1317 
1318         if (mLogging) {
1319             fprintf(stderr, "%s: init vulkan dispatch from device\n", __func__);
1320         }
1321 
1322         VulkanDispatch* dispatch = dispatch_VkDevice(boxed);
1323         init_vulkan_dispatch_from_device(vk, *pDevice, dispatch);
1324         if (m_emu->debugUtilsAvailableAndRequested) {
1325             deviceInfo.debugUtilsHelper = DebugUtilsHelper::withUtilsEnabled(*pDevice, dispatch);
1326         }
1327 
1328         deviceInfo.externalFencePool =
1329             std::make_unique<ExternalFencePool<VulkanDispatch>>(dispatch, *pDevice);
1330 
1331         if (mLogging) {
1332             fprintf(stderr, "%s: init vulkan dispatch from device (end)\n", __func__);
1333         }
1334 
1335         deviceInfo.boxed = boxed;
1336 
1337         // Next, get information about the queue families used by this device.
1338         std::unordered_map<uint32_t, uint32_t> queueFamilyIndexCounts;
1339         for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
1340             const auto& queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
1341             // Check only queues created with flags = 0 in VkDeviceQueueCreateInfo.
1342             auto flags = queueCreateInfo.flags;
1343             if (flags) continue;
1344             uint32_t queueFamilyIndex = queueCreateInfo.queueFamilyIndex;
1345             uint32_t queueCount = queueCreateInfo.queueCount;
1346             queueFamilyIndexCounts[queueFamilyIndex] = queueCount;
1347         }
1348 
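        // For each requested queue family, fetch the host queues, record their
        // family index and owning device, and wrap them in boxed handles with a
        // per-queue lock.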
1349         for (auto it : queueFamilyIndexCounts) {
1350             auto index = it.first;
1351             auto count = it.second;
1352             auto& queues = deviceInfo.queues[index];
1353             for (uint32_t i = 0; i < count; ++i) {
1354                 VkQueue queueOut;
1355 
1356                 if (mLogging) {
1357                     fprintf(stderr, "%s: get device queue (begin)\n", __func__);
1358                 }
1359 
1360                 vk->vkGetDeviceQueue(*pDevice, index, i, &queueOut);
1361 
1362                 if (mLogging) {
1363                     fprintf(stderr, "%s: get device queue (end)\n", __func__);
1364                 }
1365                 queues.push_back(queueOut);
1366                 mQueueInfo[queueOut].device = *pDevice;
1367                 mQueueInfo[queueOut].queueFamilyIndex = index;
1368 
1369                 auto boxed = new_boxed_VkQueue(queueOut, dispatch_VkDevice(deviceInfo.boxed),
1370                                                false /* does not own dispatch */);
1371                 mQueueInfo[queueOut].boxed = boxed;
1372                 mQueueInfo[queueOut].lock = new Lock;
1373             }
1374         }
1375 
1376         // Box the device.
1377         *pDevice = (VkDevice)deviceInfo.boxed;
1378 
1379         if (mLogging) {
1380             fprintf(stderr, "%s: (end)\n", __func__);
1381         }
1382 
1383         return VK_SUCCESS;
1384     }
1385 
1386     void on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice boxed_device,
1387                              uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {
1388         auto device = unbox_VkDevice(boxed_device);
1389 
1390         std::lock_guard<std::recursive_mutex> lock(mLock);
1391 
1392         *pQueue = VK_NULL_HANDLE;
1393 
1394         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1395         if (!deviceInfo) return;
1396 
1397         const auto& queues = deviceInfo->queues;
1398 
1399         const auto* queueList = android::base::find(queues, queueFamilyIndex);
1400         if (!queueList) return;
1401         if (queueIndex >= queueList->size()) return;
1402 
1403         VkQueue unboxedQueue = (*queueList)[queueIndex];
1404 
1405         auto* queueInfo = android::base::find(mQueueInfo, unboxedQueue);
1406         if (!queueInfo) return;
1407 
1408         *pQueue = (VkQueue)queueInfo->boxed;
1409     }
1410 
1411     void on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice boxed_device,
1412                               const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {
1413         // Protected memory is not supported on emulators, so we should
1414         // not return any queue if a client requests a protected device
1415         // queue.
1416         if (pQueueInfo->flags & VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT) {
1417             *pQueue = VK_NULL_HANDLE;
1418             fprintf(stderr, "%s: Cannot get protected Vulkan device queue\n", __func__);
1419             return;
1420         }
1421         uint32_t queueFamilyIndex = pQueueInfo->queueFamilyIndex;
1422         uint32_t queueIndex = pQueueInfo->queueIndex;
1423         on_vkGetDeviceQueue(pool, boxed_device, queueFamilyIndex, queueIndex, pQueue);
1424     }
1425 
1426     void destroyDeviceLocked(VkDevice device, const VkAllocationCallbacks* pAllocator) {
1427         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1428         if (!deviceInfo) return;
1429 
1430         deviceInfo->decompPipelines->clear();
1431 
1432         auto eraseIt = mQueueInfo.begin();
1433         for (; eraseIt != mQueueInfo.end();) {
1434             if (eraseIt->second.device == device) {
1435                 delete eraseIt->second.lock;
1436                 delete_VkQueue(eraseIt->second.boxed);
1437                 eraseIt = mQueueInfo.erase(eraseIt);
1438             } else {
1439                 ++eraseIt;
1440             }
1441         }
1442 
1443         VulkanDispatch* deviceDispatch = dispatch_VkDevice(deviceInfo->boxed);
1444 
1445         // Destroy pooled external fences
1446         auto deviceFences = deviceInfo->externalFencePool->popAll();
1447         for (auto fence : deviceFences) {
1448             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
1449             mFenceInfo.erase(fence);
1450         }
1451 
1452         for (auto fence : findDeviceObjects(device, mFenceInfo)) {
1453             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
1454             mFenceInfo.erase(fence);
1455         }
1456 
1457         // Run the underlying API call.
1458         m_vk->vkDestroyDevice(device, pAllocator);
1459 
1460         delete_VkDevice(deviceInfo->boxed);
1461     }
1462 
1463     void on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice boxed_device,
1464                             const VkAllocationCallbacks* pAllocator) {
1465         auto device = unbox_VkDevice(boxed_device);
1466 
1467         std::lock_guard<std::recursive_mutex> lock(mLock);
1468 
1469         sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
1470         destroyDeviceLocked(device, pAllocator);
1471 
1472         mDeviceInfo.erase(device);
1473         mDeviceToPhysicalDevice.erase(device);
1474     }
1475 
1476     VkResult on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice boxed_device,
1477                                const VkBufferCreateInfo* pCreateInfo,
1478                                const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {
1479         auto device = unbox_VkDevice(boxed_device);
1480         auto vk = dispatch_VkDevice(boxed_device);
1481 
1482         VkResult result = vk->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1483 
1484         if (result == VK_SUCCESS) {
1485             std::lock_guard<std::recursive_mutex> lock(mLock);
1486             auto& bufInfo = mBufferInfo[*pBuffer];
1487             bufInfo.device = device;
1488             bufInfo.size = pCreateInfo->size;
1489             *pBuffer = new_boxed_non_dispatchable_VkBuffer(*pBuffer);
1490         }
1491 
1492         return result;
1493     }
1494 
1495     void on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice boxed_device, VkBuffer buffer,
1496                             const VkAllocationCallbacks* pAllocator) {
1497         auto device = unbox_VkDevice(boxed_device);
1498         auto vk = dispatch_VkDevice(boxed_device);
1499 
1500         vk->vkDestroyBuffer(device, buffer, pAllocator);
1501 
1502         std::lock_guard<std::recursive_mutex> lock(mLock);
1503         mBufferInfo.erase(buffer);
1504     }
1505 
1506     void setBufferMemoryBindInfoLocked(VkBuffer buffer, VkDeviceMemory memory,
1507                                        VkDeviceSize memoryOffset) {
1508         auto* bufferInfo = android::base::find(mBufferInfo, buffer);
1509         if (!bufferInfo) return;
1510         bufferInfo->memory = memory;
1511         bufferInfo->memoryOffset = memoryOffset;
1512     }
1513 
1514     VkResult on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice boxed_device,
1515                                    VkBuffer buffer, VkDeviceMemory memory,
1516                                    VkDeviceSize memoryOffset) {
1517         auto device = unbox_VkDevice(boxed_device);
1518         auto vk = dispatch_VkDevice(boxed_device);
1519 
1520         VALIDATE_REQUIRED_HANDLE(memory);
1521         VkResult result = vk->vkBindBufferMemory(device, buffer, memory, memoryOffset);
1522 
1523         if (result == VK_SUCCESS) {
1524             std::lock_guard<std::recursive_mutex> lock(mLock);
1525             setBufferMemoryBindInfoLocked(buffer, memory, memoryOffset);
1526         }
1527         return result;
1528     }
1529 
1530     VkResult on_vkBindBufferMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
1531                                     uint32_t bindInfoCount,
1532                                     const VkBindBufferMemoryInfo* pBindInfos) {
1533         auto device = unbox_VkDevice(boxed_device);
1534         auto vk = dispatch_VkDevice(boxed_device);
1535 
1536         for (uint32_t i = 0; i < bindInfoCount; ++i) {
1537             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
1538         }
1539         VkResult result = vk->vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
1540 
1541         if (result == VK_SUCCESS) {
1542             std::lock_guard<std::recursive_mutex> lock(mLock);
1543             for (uint32_t i = 0; i < bindInfoCount; ++i) {
1544                 setBufferMemoryBindInfoLocked(pBindInfos[i].buffer, pBindInfos[i].memory,
1545                                               pBindInfos[i].memoryOffset);
1546             }
1547         }
1548 
1549         return result;
1550     }
1551 
1552     VkResult on_vkBindBufferMemory2KHR(android::base::BumpPool* pool, VkDevice boxed_device,
1553                                        uint32_t bindInfoCount,
1554                                        const VkBindBufferMemoryInfo* pBindInfos) {
1555         auto device = unbox_VkDevice(boxed_device);
1556         auto vk = dispatch_VkDevice(boxed_device);
1557 
1558         for (uint32_t i = 0; i < bindInfoCount; ++i) {
1559             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
1560         }
1561         VkResult result = vk->vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
1562 
1563         if (result == VK_SUCCESS) {
1564             std::lock_guard<std::recursive_mutex> lock(mLock);
1565             for (uint32_t i = 0; i < bindInfoCount; ++i) {
1566                 setBufferMemoryBindInfoLocked(pBindInfos[i].buffer, pBindInfos[i].memory,
1567                                               pBindInfos[i].memoryOffset);
1568             }
1569         }
1570 
1571         return result;
1572     }
1573 
1574     VkResult on_vkCreateImage(android::base::BumpPool* pool, VkDevice boxed_device,
1575                               const VkImageCreateInfo* pCreateInfo,
1576                               const VkAllocationCallbacks* pAllocator, VkImage* pImage) {
1577         auto device = unbox_VkDevice(boxed_device);
1578         auto vk = dispatch_VkDevice(boxed_device);
1579 
1580         std::lock_guard<std::recursive_mutex> lock(mLock);
1581 
1582         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1583         if (!deviceInfo) {
1584             return VK_ERROR_OUT_OF_HOST_MEMORY;
1585         }
1586 
1587         if (deviceInfo->imageFormats.find(pCreateInfo->format) == deviceInfo->imageFormats.end()) {
1588             INFO("gfxstream_texture_format_manifest: %s", string_VkFormat(pCreateInfo->format));
1589             deviceInfo->imageFormats.insert(pCreateInfo->format);
1590         }
1591 
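        // If the format needs emulated decompression, the image is created with
        // the decompressed output format; the compressed mipmap images are set up
        // separately after creation succeeds.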
1592         const bool needDecompression = deviceInfo->needEmulatedDecompression(pCreateInfo->format);
1593         CompressedImageInfo cmpInfo =
1594             needDecompression
1595                 ? CompressedImageInfo(device, *pCreateInfo, deviceInfo->decompPipelines.get())
1596                 : CompressedImageInfo(device);
1597         VkImageCreateInfo decompInfo;
1598         if (needDecompression) {
1599             decompInfo = cmpInfo.getOutputCreateInfo(*pCreateInfo);
1600             pCreateInfo = &decompInfo;
1601         }
1602 
1603         auto anbInfo = std::make_unique<AndroidNativeBufferInfo>();
1604         const VkNativeBufferANDROID* nativeBufferANDROID =
1605             vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
1606 
1607         VkResult createRes = VK_SUCCESS;
1608 
1609         if (nativeBufferANDROID) {
1610             auto memProps = memPropsOfDeviceLocked(device);
1611 
1612             createRes =
1613                 prepareAndroidNativeBufferImage(vk, device, *pool, pCreateInfo, nativeBufferANDROID,
1614                                                 pAllocator, memProps, anbInfo.get());
1615             if (createRes == VK_SUCCESS) {
1616                 *pImage = anbInfo->image;
1617             }
1618         } else {
1619             createRes = vk->vkCreateImage(device, pCreateInfo, pAllocator, pImage);
1620         }
1621 
1622         if (createRes != VK_SUCCESS) return createRes;
1623 
1624         if (needDecompression) {
1625             cmpInfo.setOutputImage(*pImage);
1626             cmpInfo.createCompressedMipmapImages(vk, *pCreateInfo);
1627 
1628             if (cmpInfo.isAstc()) {
1629                 if (deviceInfo->useAstcCpuDecompression) {
1630                     cmpInfo.initAstcCpuDecompression(m_vk, mDeviceInfo[device].physicalDevice);
1631                 }
1632             }
1633         }
1634 
1635         auto& imageInfo = mImageInfo[*pImage];
1636         imageInfo.device = device;
1637         imageInfo.cmpInfo = std::move(cmpInfo);
1638         imageInfo.imageCreateInfoShallow = vk_make_orphan_copy(*pCreateInfo);
1639         if (nativeBufferANDROID) imageInfo.anbInfo = std::move(anbInfo);
1640 
1641         *pImage = new_boxed_non_dispatchable_VkImage(*pImage);
1642         return createRes;
1643     }
1644 
1645     void destroyImageLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkImage image,
1646                             const VkAllocationCallbacks* pAllocator) {
1647         auto* imageInfo = android::base::find(mImageInfo, image);
1648         if (!imageInfo) return;
1649 
1650         if (!imageInfo->anbInfo) {
1651             imageInfo->cmpInfo.destroy(deviceDispatch);
1652             if (image != imageInfo->cmpInfo.outputImage()) {
1653                 deviceDispatch->vkDestroyImage(device, image, pAllocator);
1654             }
1655         }
1656         mImageInfo.erase(image);
1657     }
1658 
1659     void on_vkDestroyImage(android::base::BumpPool* pool, VkDevice boxed_device, VkImage image,
1660                            const VkAllocationCallbacks* pAllocator) {
1661         auto device = unbox_VkDevice(boxed_device);
1662         auto deviceDispatch = dispatch_VkDevice(boxed_device);
1663 
1664         std::lock_guard<std::recursive_mutex> lock(mLock);
1665         destroyImageLocked(device, deviceDispatch, image, pAllocator);
1666     }
1667 
1668     VkResult performBindImageMemoryDeferredAhb(android::base::BumpPool* pool,
1669                                                VkDevice boxed_device,
1670                                                const VkBindImageMemoryInfo* bimi) {
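        // Recreate the image against the VkNativeBufferANDROID supplied in the
        // bind info, then point the original boxed VkImage at the replacement so
        // the guest-visible handle stays valid.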
1671         auto device = unbox_VkDevice(boxed_device);
1672         auto vk = dispatch_VkDevice(boxed_device);
1673 
1674         auto original_underlying_image = bimi->image;
1675         auto original_boxed_image = unboxed_to_boxed_non_dispatchable_VkImage(original_underlying_image);
1676 
1677         VkImageCreateInfo ici = {};
1678         {
1679             std::lock_guard<std::recursive_mutex> lock(mLock);
1680 
1681             auto* imageInfo = android::base::find(mImageInfo, original_underlying_image);
1682             if (!imageInfo) {
1683                 ERR("Image for deferred AHB bind does not exist.");
1684                 return VK_ERROR_OUT_OF_HOST_MEMORY;
1685             }
1686 
1687             ici = imageInfo->imageCreateInfoShallow;
1688         }
1689 
1690         ici.pNext = vk_find_struct<VkNativeBufferANDROID>(bimi);
1691         if (!ici.pNext) {
1692             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
1693                 << "Missing VkNativeBufferANDROID for deferred AHB bind.";
1694         }
1695 
1696         VkImage boxed_replacement_image = VK_NULL_HANDLE;
1697         VkResult result = on_vkCreateImage(pool, boxed_device, &ici, nullptr, &boxed_replacement_image);
1698         if (result != VK_SUCCESS) {
1699             ERR("Failed to create image for deferred AHB bind.");
1700             return VK_ERROR_OUT_OF_HOST_MEMORY;
1701         }
1702 
1703         on_vkDestroyImage(pool, boxed_device, original_underlying_image, nullptr);
1704 
1705         {
1706             std::lock_guard<std::recursive_mutex> lock(mLock);
1707 
1708             auto underlying_replacement_image = unbox_VkImage(boxed_replacement_image);
1709             delete_VkImage(boxed_replacement_image);
1710             set_boxed_non_dispatchable_VkImage(original_boxed_image, underlying_replacement_image);
1711         }
1712 
1713         return VK_SUCCESS;
1714     }
1715 
1716     VkResult performBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
1717                                     const VkBindImageMemoryInfo* bimi) {
1718         auto image = bimi->image;
1719         auto memory = bimi->memory;
1720         auto memoryOffset = bimi->memoryOffset;
1721 
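        // A bind with VK_NULL_HANDLE memory and a chained VkNativeBufferANDROID is
        // a deferred AHB bind and is handled by recreating the image.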
1722         const auto* anb = vk_find_struct<VkNativeBufferANDROID>(bimi);
1723         if (memory == VK_NULL_HANDLE && anb != nullptr) {
1724             return performBindImageMemoryDeferredAhb(pool, boxed_device, bimi);
1725         }
1726 
1727         auto device = unbox_VkDevice(boxed_device);
1728         auto vk = dispatch_VkDevice(boxed_device);
1729 
1730         VALIDATE_REQUIRED_HANDLE(memory);
1731         VkResult result = vk->vkBindImageMemory(device, image, memory, memoryOffset);
1732         if (result != VK_SUCCESS) {
1733             return result;
1734         }
1735 
1736         std::lock_guard<std::recursive_mutex> lock(mLock);
1737 
1738         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1739         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1740 
1741         auto* memoryInfo = android::base::find(mMemoryInfo, memory);
1742         if (!memoryInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1743 
1744         auto* imageInfo = android::base::find(mImageInfo, image);
1745         if (imageInfo) {
1746             imageInfo->boundColorBuffer = memoryInfo->boundColorBuffer;
1747         }
1748 
1749 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
1750         if (memoryInfo->mtlTexture) {
1751             result = m_vk->vkSetMTLTextureMVK(image, memoryInfo->mtlTexture);
1752             if (result != VK_SUCCESS) {
1753                 fprintf(stderr, "vkSetMTLTextureMVK failed\n");
1754                 return VK_ERROR_OUT_OF_HOST_MEMORY;
1755             }
1756         }
1757 #endif
1758         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
1759             return VK_SUCCESS;
1760         }
1761 
1762         if (!imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1763 
1764         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
1765         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
1766             return VK_SUCCESS;
1767         }
1768         return cmpInfo.bindCompressedMipmapsMemory(vk, memory, memoryOffset);
1769     }
1770 
1771     VkResult on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
1772                                   VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
1773         const VkBindImageMemoryInfo bimi = {
1774             .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
1775             .pNext = nullptr,
1776             .image = image,
1777             .memory = memory,
1778             .memoryOffset = memoryOffset,
1779         };
1780         return performBindImageMemory(pool, boxed_device, &bimi);
1781     }
1782 
1783     VkResult on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
1784                                    uint32_t bindInfoCount,
1785                                    const VkBindImageMemoryInfo* pBindInfos) {
1786         auto device = unbox_VkDevice(boxed_device);
1787         auto vk = dispatch_VkDevice(boxed_device);
1788         bool needEmulation = false;
1789 
1790         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1791         if (!deviceInfo) return VK_ERROR_UNKNOWN;
1792 
1793         for (uint32_t i = 0; i < bindInfoCount; i++) {
1794             auto* imageInfo = android::base::find(mImageInfo, pBindInfos[i].image);
1795             if (!imageInfo) return VK_ERROR_UNKNOWN;
1796 
1797             const auto* anb = vk_find_struct<VkNativeBufferANDROID>(&pBindInfos[i]);
1798             if (anb != nullptr) {
1799                 needEmulation = true;
1800                 break;
1801             }
1802 
1803             if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
1804                 needEmulation = true;
1805                 break;
1806             }
1807         }
1808 
1809         if (needEmulation) {
1810             VkResult result;
1811             for (uint32_t i = 0; i < bindInfoCount; i++) {
1812                 result = performBindImageMemory(pool, boxed_device, &pBindInfos[i]);
1813                 if (result != VK_SUCCESS) return result;
1814             }
1815 
1816             return VK_SUCCESS;
1817         }
1818 
1819         return vk->vkBindImageMemory2(device, bindInfoCount, pBindInfos);
1820     }
1821 
1822     VkResult on_vkCreateImageView(android::base::BumpPool* pool, VkDevice boxed_device,
1823                                   const VkImageViewCreateInfo* pCreateInfo,
1824                                   const VkAllocationCallbacks* pAllocator, VkImageView* pView) {
1825         auto device = unbox_VkDevice(boxed_device);
1826         auto vk = dispatch_VkDevice(boxed_device);
1827 
1828         if (!pCreateInfo) {
1829             return VK_ERROR_OUT_OF_HOST_MEMORY;
1830         }
1831 
1832         std::lock_guard<std::recursive_mutex> lock(mLock);
1833         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1834         auto* imageInfo = android::base::find(mImageInfo, pCreateInfo->image);
1835         if (!deviceInfo || !imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1836         VkImageViewCreateInfo createInfo;
1837         bool needEmulatedAlpha = false;
1838         if (deviceInfo->needEmulatedDecompression(pCreateInfo->format)) {
1839             if (imageInfo->cmpInfo.outputImage()) {
1840                 createInfo = *pCreateInfo;
1841                 createInfo.format = CompressedImageInfo::getOutputFormat(pCreateInfo->format);
1842                 needEmulatedAlpha = CompressedImageInfo::needEmulatedAlpha(pCreateInfo->format);
1843                 createInfo.image = imageInfo->cmpInfo.outputImage();
1844                 pCreateInfo = &createInfo;
1845             }
1846         } else if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
1847             // Image view on the compressed mipmaps
1848             createInfo = *pCreateInfo;
1849             createInfo.format =
1850                 CompressedImageInfo::getCompressedMipmapsFormat(pCreateInfo->format);
1851             needEmulatedAlpha = false;
1852             createInfo.image =
1853                 imageInfo->cmpInfo.compressedMipmap(pCreateInfo->subresourceRange.baseMipLevel);
1854             createInfo.subresourceRange.baseMipLevel = 0;
1855             pCreateInfo = &createInfo;
1856         }
1857         if (imageInfo->anbInfo && imageInfo->anbInfo->externallyBacked) {
1858             createInfo = *pCreateInfo;
1859             pCreateInfo = &createInfo;
1860         }
1861 
1862         VkResult result = vk->vkCreateImageView(device, pCreateInfo, pAllocator, pView);
1863         if (result != VK_SUCCESS) {
1864             return result;
1865         }
1866 
1867         auto& imageViewInfo = mImageViewInfo[*pView];
1868         imageViewInfo.device = device;
1869         imageViewInfo.needEmulatedAlpha = needEmulatedAlpha;
1870         imageViewInfo.boundColorBuffer = imageInfo->boundColorBuffer;
1871 
1872         *pView = new_boxed_non_dispatchable_VkImageView(*pView);
1873 
1874         return result;
1875     }
1876 
1877     void on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice boxed_device,
1878                                VkImageView imageView, const VkAllocationCallbacks* pAllocator) {
1879         auto device = unbox_VkDevice(boxed_device);
1880         auto vk = dispatch_VkDevice(boxed_device);
1881 
1882         vk->vkDestroyImageView(device, imageView, pAllocator);
1883         std::lock_guard<std::recursive_mutex> lock(mLock);
1884         mImageViewInfo.erase(imageView);
1885     }
1886 
1887     VkResult on_vkCreateSampler(android::base::BumpPool* pool, VkDevice boxed_device,
1888                                 const VkSamplerCreateInfo* pCreateInfo,
1889                                 const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {
1890         auto device = unbox_VkDevice(boxed_device);
1891         auto vk = dispatch_VkDevice(boxed_device);
1892         VkResult result = vk->vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
1893         if (result != VK_SUCCESS) {
1894             return result;
1895         }
1896         std::lock_guard<std::recursive_mutex> lock(mLock);
1897         auto& samplerInfo = mSamplerInfo[*pSampler];
1898         samplerInfo.device = device;
1899         deepcopy_VkSamplerCreateInfo(&samplerInfo.pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
1900                                      pCreateInfo, &samplerInfo.createInfo);
1901         // We emulate RGB with RGBA for some compressed textures, which does not
1902         // handle transparent borders correctly.
1903         samplerInfo.needEmulatedAlpha =
1904             (pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
1905              pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
1906              pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) &&
1907             (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
1908              pCreateInfo->borderColor == VK_BORDER_COLOR_INT_TRANSPARENT_BLACK ||
1909              pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT ||
1910              pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT);
1911 
1912         *pSampler = new_boxed_non_dispatchable_VkSampler(*pSampler);
1913 
1914         return result;
1915     }
1916 
1917     void destroySamplerLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkSampler sampler,
1918                               const VkAllocationCallbacks* pAllocator) {
1919         deviceDispatch->vkDestroySampler(device, sampler, pAllocator);
1920 
1921         auto* samplerInfo = android::base::find(mSamplerInfo, sampler);
1922         if (!samplerInfo) return;
1923 
1924         if (samplerInfo->emulatedborderSampler != VK_NULL_HANDLE) {
1925             deviceDispatch->vkDestroySampler(device, samplerInfo->emulatedborderSampler, nullptr);
1926         }
1927         mSamplerInfo.erase(sampler);
1928     }
1929 
1930     void on_vkDestroySampler(android::base::BumpPool* pool, VkDevice boxed_device,
1931                              VkSampler sampler, const VkAllocationCallbacks* pAllocator) {
1932         auto device = unbox_VkDevice(boxed_device);
1933         auto deviceDispatch = dispatch_VkDevice(boxed_device);
1934 
1935         std::lock_guard<std::recursive_mutex> lock(mLock);
1936         destroySamplerLocked(device, deviceDispatch, sampler, pAllocator);
1937     }
1938 
1939     VkResult on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
1940                                   const VkSemaphoreCreateInfo* pCreateInfo,
1941                                   const VkAllocationCallbacks* pAllocator,
1942                                   VkSemaphore* pSemaphore) {
1943         auto device = unbox_VkDevice(boxed_device);
1944         auto vk = dispatch_VkDevice(boxed_device);
1945 
1946         VkSemaphoreCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
1947         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
1948 
1949         VkSemaphoreTypeCreateInfoKHR localSemaphoreTypeCreateInfo;
1950         if (const VkSemaphoreTypeCreateInfoKHR* semaphoreTypeCiPtr =
1951                 vk_find_struct<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo);
1952             semaphoreTypeCiPtr) {
1953             localSemaphoreTypeCreateInfo = vk_make_orphan_copy(*semaphoreTypeCiPtr);
1954             vk_append_struct(&structChainIter, &localSemaphoreTypeCreateInfo);
1955         }
1956 
1957         const VkExportSemaphoreCreateInfoKHR* exportCiPtr =
1958             vk_find_struct<VkExportSemaphoreCreateInfoKHR>(pCreateInfo);
1959         VkExportSemaphoreCreateInfoKHR localSemaphoreCreateInfo;
1960 
1961         if (exportCiPtr) {
1962             localSemaphoreCreateInfo = vk_make_orphan_copy(*exportCiPtr);
1963 
1964 #ifdef _WIN32
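            // On Windows, rewrite any requested export handle type to an opaque
            // Win32 handle; FD-based semaphore handles are not available there.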
1965             if (localSemaphoreCreateInfo.handleTypes) {
1966                 localSemaphoreCreateInfo.handleTypes =
1967                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
1968             }
1969 #endif
1970 
1971             vk_append_struct(&structChainIter, &localSemaphoreCreateInfo);
1972         }
1973 
1974         VkResult res = vk->vkCreateSemaphore(device, &localCreateInfo, pAllocator, pSemaphore);
1975 
1976         if (res != VK_SUCCESS) return res;
1977 
1978         std::lock_guard<std::recursive_mutex> lock(mLock);
1979 
1980         auto& semaphoreInfo = mSemaphoreInfo[*pSemaphore];
1981         semaphoreInfo.device = device;
1982 
1983         *pSemaphore = new_boxed_non_dispatchable_VkSemaphore(*pSemaphore);
1984 
1985         return res;
1986     }
1987 
1988     VkResult on_vkCreateFence(android::base::BumpPool* pool, VkDevice boxed_device,
1989                               const VkFenceCreateInfo* pCreateInfo,
1990                               const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
1991         auto device = unbox_VkDevice(boxed_device);
1992         auto vk = dispatch_VkDevice(boxed_device);
1993 
1994         VkFenceCreateInfo& createInfo = const_cast<VkFenceCreateInfo&>(*pCreateInfo);
1995 
1996         const VkExportFenceCreateInfo* exportFenceInfoPtr =
1997             vk_find_struct<VkExportFenceCreateInfo>(pCreateInfo);
1998         bool exportSyncFd = exportFenceInfoPtr && (exportFenceInfoPtr->handleTypes &
1999                                                    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
2000         bool fenceReused = false;
2001 
2002         *pFence = VK_NULL_HANDLE;
2003 
2004         if (exportSyncFd) {
2005             // Remove VkExportFenceCreateInfo, since the host doesn't need to
2006             // create an exportable fence in this case.
2007             ExternalFencePool<VulkanDispatch>* externalFencePool = nullptr;
2008             vk_struct_chain_remove(exportFenceInfoPtr, &createInfo);
2009             {
2010                 std::lock_guard<std::recursive_mutex> lock(mLock);
2011                 auto* deviceInfo = android::base::find(mDeviceInfo, device);
2012                 if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2013                 externalFencePool = deviceInfo->externalFencePool.get();
2014             }
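            // Prefer reusing a recycled fence from the device's external fence
            // pool over creating a new one.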
2015             *pFence = externalFencePool->pop(pCreateInfo);
2016             if (*pFence != VK_NULL_HANDLE) {
2017                 fenceReused = true;
2018             }
2019         }
2020 
2021         if (*pFence == VK_NULL_HANDLE) {
2022             VkResult res = vk->vkCreateFence(device, &createInfo, pAllocator, pFence);
2023             if (res != VK_SUCCESS) {
2024                 return res;
2025             }
2026         }
2027 
2028         {
2029             std::lock_guard<std::recursive_mutex> lock(mLock);
2030 
2031             DCHECK(fenceReused || mFenceInfo.find(*pFence) == mFenceInfo.end());
2032             // Create FenceInfo for *pFence.
2033             auto& fenceInfo = mFenceInfo[*pFence];
2034             fenceInfo.device = device;
2035             fenceInfo.vk = vk;
2036 
2037             *pFence = new_boxed_non_dispatchable_VkFence(*pFence);
2038             fenceInfo.boxed = *pFence;
2039             fenceInfo.external = exportSyncFd;
2040             fenceInfo.state = FenceInfo::State::kNotWaitable;
2041         }
2042 
2043         return VK_SUCCESS;
2044     }
2045 
2046     VkResult on_vkResetFences(android::base::BumpPool* pool, VkDevice boxed_device,
2047                               uint32_t fenceCount, const VkFence* pFences) {
2048         auto device = unbox_VkDevice(boxed_device);
2049         auto vk = dispatch_VkDevice(boxed_device);
2050 
2051         std::vector<VkFence> cleanedFences;
2052         std::vector<VkFence> externalFences;
2053 
2054         {
2055             std::lock_guard<std::recursive_mutex> lock(mLock);
2056             for (uint32_t i = 0; i < fenceCount; i++) {
2057                 if (pFences[i] == VK_NULL_HANDLE) continue;
2058 
2059                 DCHECK(mFenceInfo.find(pFences[i]) != mFenceInfo.end());
2060                 if (mFenceInfo[pFences[i]].external) {
2061                     externalFences.push_back(pFences[i]);
2062                 } else {
2063                     // Reset all fences' states to kNotWaitable.
2064                     cleanedFences.push_back(pFences[i]);
2065                     mFenceInfo[pFences[i]].state = FenceInfo::State::kNotWaitable;
2066                 }
2067             }
2068         }
2069 
2070         if (!cleanedFences.empty()) {
2071             VK_CHECK(vk->vkResetFences(device, (uint32_t)cleanedFences.size(),
2072                                        cleanedFences.data()));
2073         }
2074 
2075         // For external fences, we unilaterally put them in the pool to ensure they finish
2076         // TODO: should store creation info / pNext chain per fence and re-apply?
2077         VkFenceCreateInfo createInfo{
2078             .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, .pNext = 0, .flags = 0};
2079         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2080         if (!deviceInfo) return VK_ERROR_OUT_OF_DEVICE_MEMORY;
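        // Each external fence is retired to the pool and replaced by a fresh (or
        // recycled) fence; the guest's boxed handle is re-pointed at the
        // replacement below.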
2081         for (auto fence : externalFences) {
2082             VkFence replacement = deviceInfo->externalFencePool->pop(&createInfo);
2083             if (replacement == VK_NULL_HANDLE) {
2084                 VK_CHECK(vk->vkCreateFence(device, &createInfo, 0, &replacement));
2085             }
2086             deviceInfo->externalFencePool->add(fence);
2087 
2088             {
2089                 std::lock_guard<std::recursive_mutex> lock(mLock);
2090                 auto boxed_fence = unboxed_to_boxed_non_dispatchable_VkFence(fence);
2091                 delete_VkFence(boxed_fence);
2092                 set_boxed_non_dispatchable_VkFence(boxed_fence, replacement);
2093 
2094                 auto& fenceInfo = mFenceInfo[replacement];
2095                 fenceInfo.device = device;
2096                 fenceInfo.vk = vk;
2097                 fenceInfo.boxed = boxed_fence;
2098                 fenceInfo.external = true;
2099                 fenceInfo.state = FenceInfo::State::kNotWaitable;
2100 
2101                 mFenceInfo[fence].boxed = VK_NULL_HANDLE;
2102             }
2103         }
2104 
2105         return VK_SUCCESS;
2106     }
2107 
2108     VkResult on_vkImportSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
2109                                        const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
2110         auto device = unbox_VkDevice(boxed_device);
2111         auto vk = dispatch_VkDevice(boxed_device);
2112 
2113 #ifdef _WIN32
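        // On Windows the incoming "fd" is actually an id handed out by
        // vkGetSemaphoreFdKHR below; look up the exported semaphore, duplicate its
        // Win32 handle, and import that instead.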
2114         std::lock_guard<std::recursive_mutex> lock(mLock);
2115 
2116         auto* infoPtr = android::base::find(mSemaphoreInfo,
2117                                             mExternalSemaphoresById[pImportSemaphoreFdInfo->fd]);
2118 
2119         if (!infoPtr) {
2120             return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2121         }
2122 
2123         VK_EXT_SYNC_HANDLE handle = dupExternalSync(infoPtr->externalHandle);
2124 
2125         VkImportSemaphoreWin32HandleInfoKHR win32ImportInfo = {
2126             VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
2127             0,
2128             pImportSemaphoreFdInfo->semaphore,
2129             pImportSemaphoreFdInfo->flags,
2130             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
2131             handle,
2132             L"",
2133         };
2134 
2135         return vk->vkImportSemaphoreWin32HandleKHR(device, &win32ImportInfo);
2136 #else
2137         if (!hasDeviceExtension(device, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
2138             // Note: VK_KHR_external_semaphore_fd might be advertised in the guest,
2139         // because SYNC_FD handling is performed guest-side only. But we still
2140         // need to error out here when handling a non-sync, opaque FD.
2141             return VK_ERROR_OUT_OF_HOST_MEMORY;
2142         }
2143 
2144         VkImportSemaphoreFdInfoKHR importInfo = *pImportSemaphoreFdInfo;
2145         importInfo.fd = dup(pImportSemaphoreFdInfo->fd);
2146         return vk->vkImportSemaphoreFdKHR(device, &importInfo);
2147 #endif
2148     }
2149 
2150     VkResult on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
2151                                     const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {
2152         auto device = unbox_VkDevice(boxed_device);
2153         auto vk = dispatch_VkDevice(boxed_device);
2154 #ifdef _WIN32
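        // On Windows, export a Win32 handle and return a synthetic id to the guest
        // in place of a real file descriptor.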
2155         VkSemaphoreGetWin32HandleInfoKHR getWin32 = {
2156             VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
2157             0,
2158             pGetFdInfo->semaphore,
2159             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
2160         };
2161         VK_EXT_SYNC_HANDLE handle;
2162         VkResult result = vk->vkGetSemaphoreWin32HandleKHR(device, &getWin32, &handle);
2163         if (result != VK_SUCCESS) {
2164             return result;
2165         }
2166         std::lock_guard<std::recursive_mutex> lock(mLock);
2167         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = handle;
2168         int nextId = genSemaphoreId();
2169         mExternalSemaphoresById[nextId] = pGetFdInfo->semaphore;
2170         *pFd = nextId;
2171 #else
2172         if (!hasDeviceExtension(device, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
2173             // Note: VK_KHR_external_semaphore_fd might be advertised in the guest,
2174             // because SYNC_FD handling is performed guest-side only. But we still
2175             // need to error out here when handling a non-sync, opaque FD.
2176             return VK_ERROR_OUT_OF_HOST_MEMORY;
2177         }
2178 
2179         VkResult result = vk->vkGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
2180         if (result != VK_SUCCESS) {
2181             return result;
2182         }
2183 
2184         std::lock_guard<std::recursive_mutex> lock(mLock);
2185 
2186         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = *pFd;
2187         // No next id; it's already an fd.
2188 #endif
2189         return result;
2190     }
2191 
2192     void destroySemaphoreLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2193                                 VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
2194 #ifndef _WIN32
2195         const auto& ite = mSemaphoreInfo.find(semaphore);
2196         if (ite != mSemaphoreInfo.end() &&
2197             (ite->second.externalHandle != VK_EXT_SYNC_HANDLE_INVALID)) {
2198             close(ite->second.externalHandle);
2199         }
2200 #endif
2201         deviceDispatch->vkDestroySemaphore(device, semaphore, pAllocator);
2202 
2203         mSemaphoreInfo.erase(semaphore);
2204     }
2205 
2206     void on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
2207                                VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
2208         auto device = unbox_VkDevice(boxed_device);
2209         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2210 
2211         std::lock_guard<std::recursive_mutex> lock(mLock);
2212         destroySemaphoreLocked(device, deviceDispatch, semaphore, pAllocator);
2213     }
2214 
2215     void on_vkDestroyFence(android::base::BumpPool* pool, VkDevice boxed_device, VkFence fence,
2216                            const VkAllocationCallbacks* pAllocator) {
2217         auto device = unbox_VkDevice(boxed_device);
2218         auto vk = dispatch_VkDevice(boxed_device);
2219 
2220         {
2221             std::lock_guard<std::recursive_mutex> lock(mLock);
2222             // External fences are just slated for recycling. This addresses known
2223             // behavior where the guest might destroy the fence prematurely. b/228221208
2224             if (mFenceInfo[fence].external) {
2225                 auto* deviceInfo = android::base::find(mDeviceInfo, device);
2226                 if (deviceInfo) {
2227                     deviceInfo->externalFencePool->add(fence);
2228                     mFenceInfo[fence].boxed = VK_NULL_HANDLE;
2229                     return;
2230                 }
2231             }
2232             mFenceInfo.erase(fence);
2233         }
2234 
2235         vk->vkDestroyFence(device, fence, pAllocator);
2236     }
2237 
2238     VkResult on_vkCreateDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
2239                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
2240                                             const VkAllocationCallbacks* pAllocator,
2241                                             VkDescriptorSetLayout* pSetLayout) {
2242         auto device = unbox_VkDevice(boxed_device);
2243         auto vk = dispatch_VkDevice(boxed_device);
2244 
2245         auto res = vk->vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
2246 
2247         if (res == VK_SUCCESS) {
2248             std::lock_guard<std::recursive_mutex> lock(mLock);
2249             auto& info = mDescriptorSetLayoutInfo[*pSetLayout];
2250             info.device = device;
2251             *pSetLayout = new_boxed_non_dispatchable_VkDescriptorSetLayout(*pSetLayout);
2252             info.boxed = *pSetLayout;
2253 
2254             info.createInfo = *pCreateInfo;
2255             for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
2256                 info.bindings.push_back(pCreateInfo->pBindings[i]);
2257             }
2258         }
2259 
2260         return res;
2261     }
2262 
2263     void on_vkDestroyDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
2264                                          VkDescriptorSetLayout descriptorSetLayout,
2265                                          const VkAllocationCallbacks* pAllocator) {
2266         auto device = unbox_VkDevice(boxed_device);
2267         auto vk = dispatch_VkDevice(boxed_device);
2268 
2269         vk->vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
2270 
2271         std::lock_guard<std::recursive_mutex> lock(mLock);
2272         mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
2273     }
2274 
2275     VkResult on_vkCreateDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2276                                        const VkDescriptorPoolCreateInfo* pCreateInfo,
2277                                        const VkAllocationCallbacks* pAllocator,
2278                                        VkDescriptorPool* pDescriptorPool) {
2279         auto device = unbox_VkDevice(boxed_device);
2280         auto vk = dispatch_VkDevice(boxed_device);
2281 
2282         auto res = vk->vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
2283 
2284         if (res == VK_SUCCESS) {
2285             std::lock_guard<std::recursive_mutex> lock(mLock);
2286             auto& info = mDescriptorPoolInfo[*pDescriptorPool];
2287             info.device = device;
2288             *pDescriptorPool = new_boxed_non_dispatchable_VkDescriptorPool(*pDescriptorPool);
2289             info.boxed = *pDescriptorPool;
2290             info.createInfo = *pCreateInfo;
2291             info.maxSets = pCreateInfo->maxSets;
2292             info.usedSets = 0;
2293 
2294             for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; ++i) {
2295                 DescriptorPoolInfo::PoolState state;
2296                 state.type = pCreateInfo->pPoolSizes[i].type;
2297                 state.descriptorCount = pCreateInfo->pPoolSizes[i].descriptorCount;
2298                 state.used = 0;
2299                 info.pools.push_back(state);
2300             }
2301 
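                 // With batched descriptor set updates enabled, pre-create one boxed
                 // VkDescriptorSet handle ("pool id") per set this pool can hold; these
                 // boxed ids outlive individual allocations and are recycled by
                 // cleanupDescriptorPoolAllocedSetsLocked() below.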
2302             if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2303                 for (uint32_t i = 0; i < pCreateInfo->maxSets; ++i) {
2304                     info.poolIds.push_back(
2305                         (uint64_t)new_boxed_non_dispatchable_VkDescriptorSet(VK_NULL_HANDLE));
2306                 }
2307             }
2308         }
2309 
2310         return res;
2311     }
2312 
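         // Shared by vkResetDescriptorPool and vkDestroyDescriptorPool: drops tracking
         // info for every set allocated from the pool. With batched descriptor set
         // updates, the pre-created boxed pool ids are deleted only on destroy; on
         // reset they are re-pointed at VK_NULL_HANDLE so they can be reused.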
2313     void cleanupDescriptorPoolAllocedSetsLocked(VkDescriptorPool descriptorPool,
2314                                                 bool isDestroy = false) {
2315         auto* info = android::base::find(mDescriptorPoolInfo, descriptorPool);
2316         if (!info) return;
2317 
2318         for (auto it : info->allocedSetsToBoxed) {
2319             auto unboxedSet = it.first;
2320             auto boxedSet = it.second;
2321             mDescriptorSetInfo.erase(unboxedSet);
2322             if (!feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2323                 delete_VkDescriptorSet(boxedSet);
2324             }
2325         }
2326 
2327         if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2328             if (isDestroy) {
2329                 for (auto poolId : info->poolIds) {
2330                     delete_VkDescriptorSet((VkDescriptorSet)poolId);
2331                 }
2332             } else {
2333                 for (auto poolId : info->poolIds) {
2334                     auto handleInfo = sBoxedHandleManager.get(poolId);
2335                     if (handleInfo)
2336                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
2337                 }
2338             }
2339         }
2340 
2341         info->usedSets = 0;
2342         info->allocedSetsToBoxed.clear();
2343 
2344         for (auto& pool : info->pools) {
2345             pool.used = 0;
2346         }
2347     }
2348 
2349     void on_vkDestroyDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2350                                     VkDescriptorPool descriptorPool,
2351                                     const VkAllocationCallbacks* pAllocator) {
2352         auto device = unbox_VkDevice(boxed_device);
2353         auto vk = dispatch_VkDevice(boxed_device);
2354 
2355         vk->vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
2356 
2357         std::lock_guard<std::recursive_mutex> lock(mLock);
2358         cleanupDescriptorPoolAllocedSetsLocked(descriptorPool, true /* isDestroy */);
2359         mDescriptorPoolInfo.erase(descriptorPool);
2360     }
2361 
2362     VkResult on_vkResetDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2363                                       VkDescriptorPool descriptorPool,
2364                                       VkDescriptorPoolResetFlags flags) {
2365         auto device = unbox_VkDevice(boxed_device);
2366         auto vk = dispatch_VkDevice(boxed_device);
2367 
2368         auto res = vk->vkResetDescriptorPool(device, descriptorPool, flags);
2369 
2370         if (res == VK_SUCCESS) {
2371             std::lock_guard<std::recursive_mutex> lock(mLock);
2372             cleanupDescriptorPoolAllocedSetsLocked(descriptorPool);
2373         }
2374 
2375         return res;
2376     }
2377 
2378     void initDescriptorSetInfoLocked(VkDescriptorPool pool, VkDescriptorSetLayout setLayout,
2379                                      uint64_t boxedDescriptorSet, VkDescriptorSet descriptorSet) {
2380         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
2381         if (!poolInfo) {
2382             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find poolInfo";
2383         }
2384 
2385         auto* setLayoutInfo = android::base::find(mDescriptorSetLayoutInfo, setLayout);
2386         if (!setLayoutInfo) {
2387             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find setLayout";
2388         }
2389 
2390         auto& setInfo = mDescriptorSetInfo[descriptorSet];
2391 
2392         setInfo.pool = pool;
2393         setInfo.bindings = setLayoutInfo->bindings;
2394 
2395         poolInfo->allocedSetsToBoxed[descriptorSet] = (VkDescriptorSet)boxedDescriptorSet;
2396         applyDescriptorSetAllocationLocked(*poolInfo, setInfo.bindings);
2397     }
2398 
2399     VkResult on_vkAllocateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
2400                                          const VkDescriptorSetAllocateInfo* pAllocateInfo,
2401                                          VkDescriptorSet* pDescriptorSets) {
2402         auto device = unbox_VkDevice(boxed_device);
2403         auto vk = dispatch_VkDevice(boxed_device);
2404 
2405         std::lock_guard<std::recursive_mutex> lock(mLock);
2406 
2407         auto allocValidationRes = validateDescriptorSetAllocLocked(pAllocateInfo);
2408         if (allocValidationRes != VK_SUCCESS) return allocValidationRes;
2409 
2410         auto res = vk->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
2411 
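             // On success, box each returned handle and record the allocation against the
             // pool's tracked budget (see initDescriptorSetInfoLocked).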
2412         if (res == VK_SUCCESS) {
2413             auto* poolInfo =
2414                 android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
2415             if (!poolInfo) return res;
2416 
2417             for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
2418                 auto unboxed = pDescriptorSets[i];
2419                 pDescriptorSets[i] = new_boxed_non_dispatchable_VkDescriptorSet(pDescriptorSets[i]);
2420                 initDescriptorSetInfoLocked(pAllocateInfo->descriptorPool,
2421                                             pAllocateInfo->pSetLayouts[i],
2422                                             (uint64_t)(pDescriptorSets[i]), unboxed);
2423             }
2424         }
2425 
2426         return res;
2427     }
2428 
2429     VkResult on_vkFreeDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
2430                                      VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
2431                                      const VkDescriptorSet* pDescriptorSets) {
2432         auto device = unbox_VkDevice(boxed_device);
2433         auto vk = dispatch_VkDevice(boxed_device);
2434 
2435         auto res =
2436             vk->vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
2437 
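             // On success, release each set's tracked allocation from its pool and either
             // clear the boxed handle (batched-update mode) or delete it outright.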
2438         if (res == VK_SUCCESS) {
2439             std::lock_guard<std::recursive_mutex> lock(mLock);
2440 
2441             for (uint32_t i = 0; i < descriptorSetCount; ++i) {
2442                 auto* setInfo = android::base::find(mDescriptorSetInfo, pDescriptorSets[i]);
2443                 if (!setInfo) continue;
2444                 auto* poolInfo = android::base::find(mDescriptorPoolInfo, setInfo->pool);
2445                 if (!poolInfo) continue;
2446 
2447                 removeDescriptorSetAllocationLocked(*poolInfo, setInfo->bindings);
2448 
2449                 auto descSetAllocedEntry =
2450                     android::base::find(poolInfo->allocedSetsToBoxed, pDescriptorSets[i]);
2451                 if (!descSetAllocedEntry) continue;
2452 
2453                 auto handleInfo = sBoxedHandleManager.get((uint64_t)*descSetAllocedEntry);
2454                 if (handleInfo) {
2455                     if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2456                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
2457                     } else {
2458                         delete_VkDescriptorSet(*descSetAllocedEntry);
2459                     }
2460                 }
2461 
2462                 poolInfo->allocedSetsToBoxed.erase(pDescriptorSets[i]);
2463 
2464                 mDescriptorSetInfo.erase(pDescriptorSets[i]);
2465             }
2466         }
2467 
2468         return res;
2469     }
2470 
2471     void on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
2472                                    uint32_t descriptorWriteCount,
2473                                    const VkWriteDescriptorSet* pDescriptorWrites,
2474                                    uint32_t descriptorCopyCount,
2475                                    const VkCopyDescriptorSet* pDescriptorCopies) {
2476         auto device = unbox_VkDevice(boxed_device);
2477         auto vk = dispatch_VkDevice(boxed_device);
2478 
2479         std::lock_guard<std::recursive_mutex> lock(mLock);
2480         on_vkUpdateDescriptorSetsImpl(pool, vk, device, descriptorWriteCount, pDescriptorWrites,
2481                                       descriptorCopyCount, pDescriptorCopies);
2482     }
2483 
2484     void on_vkUpdateDescriptorSetsImpl(android::base::BumpPool* pool, VulkanDispatch* vk,
2485                                        VkDevice device, uint32_t descriptorWriteCount,
2486                                        const VkWriteDescriptorSet* pDescriptorWrites,
2487                                        uint32_t descriptorCopyCount,
2488                                        const VkCopyDescriptorSet* pDescriptorCopies) {
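             // First pass: find writes that sample an image view needing emulated alpha
             // through a sampler that also needs emulation; only those writes require a
             // deep copy so the sampler can be swapped for an emulated one below.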
2489         bool needEmulateWriteDescriptor = false;
2490         // Note: zero-length new[] allocations are valid C++, so descriptorWriteCount == 0 is safe here.
2491         std::unique_ptr<bool[]> descriptorWritesNeedDeepCopy(new bool[descriptorWriteCount]);
2492         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
2493             const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[i];
2494             descriptorWritesNeedDeepCopy[i] = false;
2495             if (!vk_util::vk_descriptor_type_has_image_view(descriptorWrite.descriptorType)) {
2496                 continue;
2497             }
2498             for (uint32_t j = 0; j < descriptorWrite.descriptorCount; j++) {
2499                 const VkDescriptorImageInfo& imageInfo = descriptorWrite.pImageInfo[j];
2500                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
2501                 if (!imgViewInfo) {
2502                     continue;
2503                 }
2504                 if (imgViewInfo->boundColorBuffer) {
2505                     // TODO(igorc): Move this to vkQueueSubmit time.
2506                     auto fb = FrameBuffer::getFB();
2507                     if (fb) {
2508                         fb->invalidateColorBufferForVk(imgViewInfo->boundColorBuffer);
2509                     }
2510                 }
2511                 if (descriptorWrite.descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
2512                     continue;
2513                 }
2514                 const auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
2515                 if (samplerInfo && imgViewInfo->needEmulatedAlpha &&
2516                     samplerInfo->needEmulatedAlpha) {
2517                     needEmulateWriteDescriptor = true;
2518                     descriptorWritesNeedDeepCopy[i] = true;
2519                     break;
2520                 }
2521             }
2522         }
2523         if (!needEmulateWriteDescriptor) {
2524             vk->vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
2525                                        descriptorCopyCount, pDescriptorCopies);
2526             return;
2527         }
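             // Second pass: shallow-copy every write, deep-copying pImageInfo only where
             // needed. imageInfoPool keeps the copied arrays alive until the final
             // vkUpdateDescriptorSets call.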
2528         std::list<std::unique_ptr<VkDescriptorImageInfo[]>> imageInfoPool;
2529         std::unique_ptr<VkWriteDescriptorSet[]> descriptorWrites(
2530             new VkWriteDescriptorSet[descriptorWriteCount]);
2531         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
2532             const VkWriteDescriptorSet& srcDescriptorWrite = pDescriptorWrites[i];
2533             VkWriteDescriptorSet& dstDescriptorWrite = descriptorWrites[i];
2534             // Shallow copy first
2535             dstDescriptorWrite = srcDescriptorWrite;
2536             if (!descriptorWritesNeedDeepCopy[i]) {
2537                 continue;
2538             }
2539             // Deep copy
2540             assert(dstDescriptorWrite.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
2541             imageInfoPool.emplace_back(
2542                 new VkDescriptorImageInfo[dstDescriptorWrite.descriptorCount]);
2543             VkDescriptorImageInfo* imageInfos = imageInfoPool.back().get();
2544             memcpy(imageInfos, srcDescriptorWrite.pImageInfo,
2545                    dstDescriptorWrite.descriptorCount * sizeof(VkDescriptorImageInfo));
2546             dstDescriptorWrite.pImageInfo = imageInfos;
2547             for (uint32_t j = 0; j < dstDescriptorWrite.descriptorCount; j++) {
2548                 VkDescriptorImageInfo& imageInfo = imageInfos[j];
2549                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
2550                 auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
2551                 if (!imgViewInfo || !samplerInfo) continue;
2552                 if (imgViewInfo->needEmulatedAlpha && samplerInfo->needEmulatedAlpha) {
2553                     if (samplerInfo->emulatedborderSampler == VK_NULL_HANDLE) {
2554                         // create the emulated sampler
2555                         VkSamplerCreateInfo createInfo;
2556                         deepcopy_VkSamplerCreateInfo(pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
2557                                                      &samplerInfo->createInfo, &createInfo);
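                             // The emulated sampler replaces transparent border colors with
                             // their opaque equivalents (and forces the alpha component of
                             // custom border colors) so out-of-bounds samples never read
                             // back as transparent.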
2558                         switch (createInfo.borderColor) {
2559                             case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
2560                                 createInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
2561                                 break;
2562                             case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
2563                                 createInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2564                                 break;
2565                             case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2566                             case VK_BORDER_COLOR_INT_CUSTOM_EXT: {
2567                                 VkSamplerCustomBorderColorCreateInfoEXT*
2568                                     customBorderColorCreateInfo =
2569                                         vk_find_struct<VkSamplerCustomBorderColorCreateInfoEXT>(
2570                                             &createInfo);
2571                                 if (customBorderColorCreateInfo) {
2572                                     switch (createInfo.borderColor) {
2573                                         case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2574                                             customBorderColorCreateInfo->customBorderColor
2575                                                 .float32[3] = 1.0f;
2576                                             break;
2577                                         case VK_BORDER_COLOR_INT_CUSTOM_EXT:
2578                                             customBorderColorCreateInfo->customBorderColor
2579                                                 .int32[3] = 128;
2580                                             break;
2581                                         default:
2582                                             break;
2583                                     }
2584                                 }
2585                                 break;
2586                             }
2587                             default:
2588                                 break;
2589                         }
2590                         vk->vkCreateSampler(device, &createInfo, nullptr,
2591                                             &samplerInfo->emulatedborderSampler);
2592                     }
2593                     imageInfo.sampler = samplerInfo->emulatedborderSampler;
2594                 }
2595             }
2596         }
2597         vk->vkUpdateDescriptorSets(device, descriptorWriteCount, descriptorWrites.get(),
2598                                    descriptorCopyCount, pDescriptorCopies);
2599     }
2600 
2601     VkResult on_vkCreateShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
2602                                      const VkShaderModuleCreateInfo* pCreateInfo,
2603                                      const VkAllocationCallbacks* pAllocator,
2604                                      VkShaderModule* pShaderModule) {
2605         auto device = unbox_VkDevice(boxed_device);
2606         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2607 
2608         VkResult result =
2609             deviceDispatch->vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
2610         if (result != VK_SUCCESS) {
2611             return result;
2612         }
2613 
2614         std::lock_guard<std::recursive_mutex> lock(mLock);
2615 
2616         auto& shaderModuleInfo = mShaderModuleInfo[*pShaderModule];
2617         shaderModuleInfo.device = device;
2618 
2619         *pShaderModule = new_boxed_non_dispatchable_VkShaderModule(*pShaderModule);
2620 
2621         return result;
2622     }
2623 
2624     void destroyShaderModuleLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2625                                    VkShaderModule shaderModule,
2626                                    const VkAllocationCallbacks* pAllocator) {
2627         deviceDispatch->vkDestroyShaderModule(device, shaderModule, pAllocator);
2628 
2629         mShaderModuleInfo.erase(shaderModule);
2630     }
2631 
2632     void on_vkDestroyShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
2633                                   VkShaderModule shaderModule,
2634                                   const VkAllocationCallbacks* pAllocator) {
2635         auto device = unbox_VkDevice(boxed_device);
2636         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2637 
2638         std::lock_guard<std::recursive_mutex> lock(mLock);
2639         destroyShaderModuleLocked(device, deviceDispatch, shaderModule, pAllocator);
2640     }
2641 
2642     VkResult on_vkCreatePipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
2643                                       const VkPipelineCacheCreateInfo* pCreateInfo,
2644                                       const VkAllocationCallbacks* pAllocator,
2645                                       VkPipelineCache* pPipelineCache) {
2646         auto device = unbox_VkDevice(boxed_device);
2647         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2648 
2649         VkResult result =
2650             deviceDispatch->vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
2651         if (result != VK_SUCCESS) {
2652             return result;
2653         }
2654 
2655         std::lock_guard<std::recursive_mutex> lock(mLock);
2656 
2657         auto& pipelineCacheInfo = mPipelineCacheInfo[*pPipelineCache];
2658         pipelineCacheInfo.device = device;
2659 
2660         *pPipelineCache = new_boxed_non_dispatchable_VkPipelineCache(*pPipelineCache);
2661 
2662         return result;
2663     }
2664 
2665     void destroyPipelineCacheLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2666                                     VkPipelineCache pipelineCache,
2667                                     const VkAllocationCallbacks* pAllocator) {
2668         deviceDispatch->vkDestroyPipelineCache(device, pipelineCache, pAllocator);
2669 
2670         mPipelineCacheInfo.erase(pipelineCache);
2671     }
2672 
2673     void on_vkDestroyPipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
2674                                    VkPipelineCache pipelineCache,
2675                                    const VkAllocationCallbacks* pAllocator) {
2676         auto device = unbox_VkDevice(boxed_device);
2677         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2678 
2679         std::lock_guard<std::recursive_mutex> lock(mLock);
2680         destroyPipelineCacheLocked(device, deviceDispatch, pipelineCache, pAllocator);
2681     }
2682 
2683     VkResult on_vkCreateGraphicsPipelines(android::base::BumpPool* pool, VkDevice boxed_device,
2684                                           VkPipelineCache pipelineCache, uint32_t createInfoCount,
2685                                           const VkGraphicsPipelineCreateInfo* pCreateInfos,
2686                                           const VkAllocationCallbacks* pAllocator,
2687                                           VkPipeline* pPipelines) {
2688         auto device = unbox_VkDevice(boxed_device);
2689         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2690 
2691         VkResult result = deviceDispatch->vkCreateGraphicsPipelines(
2692             device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
2693         if (result != VK_SUCCESS && result != VK_PIPELINE_COMPILE_REQUIRED) {
2694             return result;
2695         }
2696 
2697         std::lock_guard<std::recursive_mutex> lock(mLock);
2698 
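             // VK_PIPELINE_COMPILE_REQUIRED can leave some entries as VK_NULL_HANDLE;
             // only track and box the pipelines that were actually created.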
2699         for (uint32_t i = 0; i < createInfoCount; i++) {
2700             if (!pPipelines[i]) {
2701                 continue;
2702             }
2703             auto& pipelineInfo = mPipelineInfo[pPipelines[i]];
2704             pipelineInfo.device = device;
2705 
2706             pPipelines[i] = new_boxed_non_dispatchable_VkPipeline(pPipelines[i]);
2707         }
2708 
2709         return result;
2710     }
2711 
2712     void destroyPipelineLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkPipeline pipeline,
2713                                const VkAllocationCallbacks* pAllocator) {
2714         deviceDispatch->vkDestroyPipeline(device, pipeline, pAllocator);
2715 
2716         mPipelineInfo.erase(pipeline);
2717     }
2718 
2719     void on_vkDestroyPipeline(android::base::BumpPool* pool, VkDevice boxed_device,
2720                               VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {
2721         auto device = unbox_VkDevice(boxed_device);
2722         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2723 
2724         std::lock_guard<std::recursive_mutex> lock(mLock);
2725         destroyPipelineLocked(device, deviceDispatch, pipeline, pAllocator);
2726     }
2727 
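         // For images using emulated compressed formats, copies cannot target the
         // guest-visible image directly; each region is redirected to the per-mip-level
         // backing image maintained by CompressedImageInfo.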
2728     void on_vkCmdCopyImage(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
2729                            VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
2730                            VkImageLayout dstImageLayout, uint32_t regionCount,
2731                            const VkImageCopy* pRegions) {
2732         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2733         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2734 
2735         std::lock_guard<std::recursive_mutex> lock(mLock);
2736         auto* srcImg = android::base::find(mImageInfo, srcImage);
2737         auto* dstImg = android::base::find(mImageInfo, dstImage);
2738         if (!srcImg || !dstImg) return;
2739 
2740         VkDevice device = srcImg->cmpInfo.device();
2741         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2742         if (!deviceInfo) return;
2743 
2744         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
2745         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
2746         if (!needEmulatedSrc && !needEmulatedDst) {
2747             vk->vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
2748                                regionCount, pRegions);
2749             return;
2750         }
2751         VkImage srcImageMip = srcImage;
2752         VkImage dstImageMip = dstImage;
2753         for (uint32_t r = 0; r < regionCount; r++) {
2754             if (needEmulatedSrc) {
2755                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pRegions[r].srcSubresource.mipLevel);
2756             }
2757             if (needEmulatedDst) {
2758                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pRegions[r].dstSubresource.mipLevel);
2759             }
2760             VkImageCopy region = CompressedImageInfo::getCompressedMipmapsImageCopy(
2761                 pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
2762             vk->vkCmdCopyImage(commandBuffer, srcImageMip, srcImageLayout, dstImageMip,
2763                                dstImageLayout, 1, &region);
2764         }
2765     }
2766 
2767     void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
2768                                    VkCommandBuffer boxed_commandBuffer, VkImage srcImage,
2769                                    VkImageLayout srcImageLayout, VkBuffer dstBuffer,
2770                                    uint32_t regionCount, const VkBufferImageCopy* pRegions) {
2771         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2772         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2773 
2774         std::lock_guard<std::recursive_mutex> lock(mLock);
2775         auto* imageInfo = android::base::find(mImageInfo, srcImage);
2776         auto* bufferInfo = android::base::find(mBufferInfo, dstBuffer);
2777         if (!imageInfo || !bufferInfo) return;
2778         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
2779         if (!deviceInfo) return;
2780         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
2781         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
2782             vk->vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer,
2783                                        regionCount, pRegions);
2784             return;
2785         }
2786         for (uint32_t r = 0; r < regionCount; r++) {
2787             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
2788             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
2789             vk->vkCmdCopyImageToBuffer(commandBuffer, cmpInfo.compressedMipmap(mipLevel),
2790                                        srcImageLayout, dstBuffer, 1, &region);
2791         }
2792     }
2793 
2794     void on_vkCmdCopyImage2(android::base::BumpPool* pool,
2795                            VkCommandBuffer boxed_commandBuffer,
2796                            const VkCopyImageInfo2* pCopyImageInfo) {
2797         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2798         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2799 
2800         std::lock_guard<std::recursive_mutex> lock(mLock);
2801         auto* srcImg = android::base::find(mImageInfo, pCopyImageInfo->srcImage);
2802         auto* dstImg = android::base::find(mImageInfo, pCopyImageInfo->dstImage);
2803         if (!srcImg || !dstImg) return;
2804 
2805         VkDevice device = srcImg->cmpInfo.device();
2806         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2807         if (!deviceInfo) return;
2808 
2809         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
2810         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
2811         if (!needEmulatedSrc && !needEmulatedDst) {
2812             vk->vkCmdCopyImage2(commandBuffer, pCopyImageInfo);
2813             return;
2814         }
2815         VkImage srcImageMip = pCopyImageInfo->srcImage;
2816         VkImage dstImageMip = pCopyImageInfo->dstImage;
2817         for (uint32_t r = 0; r < pCopyImageInfo->regionCount; r++) {
2818             if (needEmulatedSrc) {
2819                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].srcSubresource.mipLevel);
2820             }
2821             if (needEmulatedDst) {
2822                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].dstSubresource.mipLevel);
2823             }
2824 
2825             VkCopyImageInfo2 inf2 = *pCopyImageInfo;
2826             inf2.regionCount = 1;
2827             inf2.srcImage = srcImageMip;
2828             inf2.dstImage = dstImageMip;
2829 
2830             VkImageCopy2 region = CompressedImageInfo::getCompressedMipmapsImageCopy(
2831                 pCopyImageInfo->pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
2832             inf2.pRegions = &region;
2833 
2834             vk->vkCmdCopyImage2(commandBuffer, &inf2);
2835         }
2836     }
2837 
2838     void on_vkCmdCopyImageToBuffer2(android::base::BumpPool* pool,
2839                                    VkCommandBuffer boxed_commandBuffer,
2840                                    const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) {
2841         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2842         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2843 
2844         std::lock_guard<std::recursive_mutex> lock(mLock);
2845         auto* imageInfo = android::base::find(mImageInfo, pCopyImageToBufferInfo->srcImage);
2846         auto* bufferInfo = android::base::find(mBufferInfo, pCopyImageToBufferInfo->dstBuffer);
2847         if (!imageInfo || !bufferInfo) return;
2848         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
2849         if (!deviceInfo) return;
2850         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
2851         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
2852             vk->vkCmdCopyImageToBuffer2(commandBuffer, pCopyImageToBufferInfo);
2853             return;
2854         }
2855         for (uint32_t r = 0; r < pCopyImageToBufferInfo->regionCount; r++) {
2856             uint32_t mipLevel = pCopyImageToBufferInfo->pRegions[r].imageSubresource.mipLevel;
2857             VkBufferImageCopy2 region = cmpInfo.getBufferImageCopy(pCopyImageToBufferInfo->pRegions[r]);
2858             VkCopyImageToBufferInfo2 inf = *pCopyImageToBufferInfo;
2859             inf.regionCount = 1;
2860             inf.pRegions = &region;
2861             inf.srcImage = cmpInfo.compressedMipmap(mipLevel);
2862 
2863             vk->vkCmdCopyImageToBuffer2(commandBuffer, &inf);
2864         }
2865     }
2866 
2867     void on_vkCmdCopyImage2KHR(android::base::BumpPool* pool,
2868                            VkCommandBuffer boxed_commandBuffer,
2869                            const VkCopyImageInfo2KHR* pCopyImageInfo) {
2870         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2871         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2872 
2873         std::lock_guard<std::recursive_mutex> lock(mLock);
2874         auto* srcImg = android::base::find(mImageInfo, pCopyImageInfo->srcImage);
2875         auto* dstImg = android::base::find(mImageInfo, pCopyImageInfo->dstImage);
2876         if (!srcImg || !dstImg) return;
2877 
2878         VkDevice device = srcImg->cmpInfo.device();
2879         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2880         if (!deviceInfo) return;
2881 
2882         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
2883         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
2884         if (!needEmulatedSrc && !needEmulatedDst) {
2885             vk->vkCmdCopyImage2KHR(commandBuffer, pCopyImageInfo);
2886             return;
2887         }
2888         VkImage srcImageMip = pCopyImageInfo->srcImage;
2889         VkImage dstImageMip = pCopyImageInfo->dstImage;
2890         for (uint32_t r = 0; r < pCopyImageInfo->regionCount; r++) {
2891             if (needEmulatedSrc) {
2892                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].srcSubresource.mipLevel);
2893             }
2894             if (needEmulatedDst) {
2895                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].dstSubresource.mipLevel);
2896             }
2897 
2898             VkCopyImageInfo2KHR inf2 = *pCopyImageInfo;
2899             inf2.regionCount = 1;
2900             inf2.srcImage = srcImageMip;
2901             inf2.dstImage = dstImageMip;
2902 
2903             VkImageCopy2KHR region = CompressedImageInfo::getCompressedMipmapsImageCopy(
2904                 pCopyImageInfo->pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
2905             inf2.pRegions = &region;
2906 
2907             vk->vkCmdCopyImage2KHR(commandBuffer, &inf2);
2908         }
2909     }
2910 
2911     void on_vkCmdCopyImageToBuffer2KHR(android::base::BumpPool* pool,
2912                                    VkCommandBuffer boxed_commandBuffer,
2913                                    const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) {
2914         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2915         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2916 
2917         std::lock_guard<std::recursive_mutex> lock(mLock);
2918         auto* imageInfo = android::base::find(mImageInfo, pCopyImageToBufferInfo->srcImage);
2919         auto* bufferInfo = android::base::find(mBufferInfo, pCopyImageToBufferInfo->dstBuffer);
2920         if (!imageInfo || !bufferInfo) return;
2921         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
2922         if (!deviceInfo) return;
2923         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
2924         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
2925             vk->vkCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo);
2926             return;
2927         }
2928         for (uint32_t r = 0; r < pCopyImageToBufferInfo->regionCount; r++) {
2929             uint32_t mipLevel = pCopyImageToBufferInfo->pRegions[r].imageSubresource.mipLevel;
2930             VkBufferImageCopy2KHR region = cmpInfo.getBufferImageCopy(pCopyImageToBufferInfo->pRegions[r]);
2931             VkCopyImageToBufferInfo2KHR inf = *pCopyImageToBufferInfo;
2932             inf.regionCount = 1;
2933             inf.pRegions = &region;
2934             inf.srcImage = cmpInfo.compressedMipmap(mipLevel);
2935 
2936             vk->vkCmdCopyImageToBuffer2KHR(commandBuffer, &inf);
2937         }
2938     }
2939 
2940     void on_vkGetImageMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
2941                                          VkImage image, VkMemoryRequirements* pMemoryRequirements) {
2942         auto device = unbox_VkDevice(boxed_device);
2943         auto vk = dispatch_VkDevice(boxed_device);
2944         vk->vkGetImageMemoryRequirements(device, image, pMemoryRequirements);
2945         std::lock_guard<std::recursive_mutex> lock(mLock);
2946         updateImageMemorySizeLocked(device, image, pMemoryRequirements);
2947     }
2948 
2949     void on_vkGetImageMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
2950                                           const VkImageMemoryRequirementsInfo2* pInfo,
2951                                           VkMemoryRequirements2* pMemoryRequirements) {
2952         auto device = unbox_VkDevice(boxed_device);
2953         auto vk = dispatch_VkDevice(boxed_device);
2954         std::lock_guard<std::recursive_mutex> lock(mLock);
2955 
2956         auto physicalDevice = mDeviceToPhysicalDevice[device];
2957         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
2958         if (!physdevInfo) {
2959             // The physical device info map should always have an entry here; without
2960             // it we cannot safely query the API version or memory requirements below.
2961             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
2962                 << "No physical device info available for " << device;
2963         }
2963 
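             // Prefer the core Vulkan 1.1 entry point, then the KHR extension, and fall
             // back to the original vkGetImageMemoryRequirements otherwise.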
2964         if ((physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
2965             vk->vkGetImageMemoryRequirements2) {
2966             vk->vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
2967         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
2968             vk->vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
2969         } else {
2970             if (pInfo->pNext) {
2971                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
2972                     "having enabled the extension!");
2973             }
2974 
2975             vk->vkGetImageMemoryRequirements(device, pInfo->image,
2976                                              &pMemoryRequirements->memoryRequirements);
2977         }
2978         updateImageMemorySizeLocked(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
2979     }
2980 
2981     void on_vkGetBufferMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
2982                                           VkBuffer buffer,
2983                                           VkMemoryRequirements* pMemoryRequirements) {
2984         auto device = unbox_VkDevice(boxed_device);
2985         auto vk = dispatch_VkDevice(boxed_device);
2986         vk->vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
2987     }
2988 
2989     void on_vkGetBufferMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
2990                                            const VkBufferMemoryRequirementsInfo2* pInfo,
2991                                            VkMemoryRequirements2* pMemoryRequirements) {
2992         auto device = unbox_VkDevice(boxed_device);
2993         auto vk = dispatch_VkDevice(boxed_device);
2994 
2995         std::lock_guard<std::recursive_mutex> lock(mLock);
2996 
2997         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
2998         if (!physicalDevice) {
2999             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3000                 << "No physical device available for " << device;
3001         }
3002 
3003         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
3004         if (!physicalDeviceInfo) {
3005             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3006                 << "No physical device info available for " << *physicalDevice;
3007         }
3008 
3009         if ((physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
3010             vk->vkGetBufferMemoryRequirements2) {
3011             vk->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
3012         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
3013             vk->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
3014         } else {
3015             if (pInfo->pNext) {
3016                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
3017                     "having enabled the extension!");
3018             }
3019 
3020             vk->vkGetBufferMemoryRequirements(device, pInfo->buffer,
3021                                               &pMemoryRequirements->memoryRequirements);
3022         }
3023     }
3024 
3025     void on_vkCmdCopyBufferToImage(android::base::BumpPool* pool,
3026                                    VkCommandBuffer boxed_commandBuffer, VkBuffer srcBuffer,
3027                                    VkImage dstImage, VkImageLayout dstImageLayout,
3028                                    uint32_t regionCount, const VkBufferImageCopy* pRegions,
3029                                    const VkDecoderContext& context) {
3030         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3031         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3032 
3033         std::lock_guard<std::recursive_mutex> lock(mLock);
3034         auto* imageInfo = android::base::find(mImageInfo, dstImage);
3035         if (!imageInfo) return;
3036         auto* bufferInfo = android::base::find(mBufferInfo, srcBuffer);
3037         if (!bufferInfo) {
3038             return;
3039         }
3040         VkDevice device = bufferInfo->device;
3041         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3042         if (!deviceInfo) {
3043             return;
3044         }
3045         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
3046             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
3047                                        regionCount, pRegions);
3048             return;
3049         }
3050         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3051         if (!cmdBufferInfo) {
3052             return;
3053         }
3054         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3055 
3056         for (uint32_t r = 0; r < regionCount; r++) {
3057             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
3058             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
3059             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, cmpInfo.compressedMipmap(mipLevel),
3060                                        dstImageLayout, 1, &region);
3061         }
3062 
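             // For ASTC images that can be decoded on the CPU, also read the compressed
             // data from the source buffer's host mapping and decompress it directly into
             // the destination image.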
3063         if (cmpInfo.canDecompressOnCpu()) {
3064             // Get a pointer to the compressed image memory
3065             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
3066             if (!memoryInfo) {
3067                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
3068                 return;
3069             }
3070             if (!memoryInfo->ptr) {
3071                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
3072                 return;
3073             }
3074             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
3075             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, dstImage,
3076                                     dstImageLayout, regionCount, pRegions, context);
3077         }
3078     }
3079 
3080     void on_vkCmdCopyBufferToImage2(android::base::BumpPool* pool,
3081                                     VkCommandBuffer boxed_commandBuffer,
3082                                     const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo,
3083                                     const VkDecoderContext& context) {
3084         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3085         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3086 
3087         std::lock_guard<std::recursive_mutex> lock(mLock);
3088         auto* imageInfo = android::base::find(mImageInfo, pCopyBufferToImageInfo->dstImage);
3089         if (!imageInfo) return;
3090         auto* bufferInfo = android::base::find(mBufferInfo, pCopyBufferToImageInfo->srcBuffer);
3091         if (!bufferInfo) {
3092             return;
3093         }
3094         VkDevice device = bufferInfo->device;
3095         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3096         if (!deviceInfo) {
3097             return;
3098         }
3099         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
3100             vk->vkCmdCopyBufferToImage2(commandBuffer, pCopyBufferToImageInfo);
3101             return;
3102         }
3103         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3104         if (!cmdBufferInfo) {
3105             return;
3106         }
3107         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3108 
3109         for (uint32_t r = 0; r < pCopyBufferToImageInfo->regionCount; r++) {
3110             uint32_t mipLevel = pCopyBufferToImageInfo->pRegions[r].imageSubresource.mipLevel;
3111             VkBufferImageCopy2 region = cmpInfo.getBufferImageCopy(pCopyBufferToImageInfo->pRegions[r]);
3112             VkCopyBufferToImageInfo2 inf = *pCopyBufferToImageInfo;  // keep sType/pNext/srcBuffer/dstImageLayout valid
3113             inf.dstImage = cmpInfo.compressedMipmap(mipLevel);
3114             inf.regionCount = 1;
3115             inf.pRegions = &region;
3116 
3117             vk->vkCmdCopyBufferToImage2(commandBuffer, &inf);
3118         }
3119 
3120         if (cmpInfo.canDecompressOnCpu()) {
3121             // Get a pointer to the compressed image memory
3122             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
3123             if (!memoryInfo) {
3124                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
3125                 return;
3126             }
3127             if (!memoryInfo->ptr) {
3128                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
3129                 return;
3130             }
3131             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
3132 
3133             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, pCopyBufferToImageInfo, context);
3134         }
3135     }
3136 
3137     void on_vkCmdCopyBufferToImage2KHR(android::base::BumpPool* pool,
3138                                     VkCommandBuffer boxed_commandBuffer,
3139                                     const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo,
3140                                     const VkDecoderContext& context) {
3141         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3142         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3143 
3144         std::lock_guard<std::recursive_mutex> lock(mLock);
3145         auto* imageInfo = android::base::find(mImageInfo, pCopyBufferToImageInfo->dstImage);
3146         if (!imageInfo) return;
3147         auto* bufferInfo = android::base::find(mBufferInfo, pCopyBufferToImageInfo->srcBuffer);
3148         if (!bufferInfo) {
3149             return;
3150         }
3151         VkDevice device = bufferInfo->device;
3152         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3153         if (!deviceInfo) {
3154             return;
3155         }
3156         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
3157             vk->vkCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo);
3158             return;
3159         }
3160         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3161         if (!cmdBufferInfo) {
3162             return;
3163         }
3164         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3165 
3166         for (uint32_t r = 0; r < pCopyBufferToImageInfo->regionCount; r++) {
3167             uint32_t mipLevel = pCopyBufferToImageInfo->pRegions[r].imageSubresource.mipLevel;
3168             VkBufferImageCopy2KHR region = cmpInfo.getBufferImageCopy(pCopyBufferToImageInfo->pRegions[r]);
3169             VkCopyBufferToImageInfo2KHR inf = *pCopyBufferToImageInfo;  // keep sType/pNext/srcBuffer/dstImageLayout valid
3170             inf.dstImage = cmpInfo.compressedMipmap(mipLevel);
3171             inf.regionCount = 1;
3172             inf.pRegions = &region;
3173 
3174             vk->vkCmdCopyBufferToImage2KHR(commandBuffer, &inf);
3175         }
3176 
3177         if (cmpInfo.canDecompressOnCpu()) {
3178             // Get a pointer to the compressed image memory
3179             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
3180             if (!memoryInfo) {
3181                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
3182                 return;
3183             }
3184             if (!memoryInfo->ptr) {
3185                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
3186                 return;
3187             }
3188             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
3189 
3190             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, pCopyBufferToImageInfo, context);
3191         }
3192     }
3193 
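         // Barriers that reference VK_QUEUE_FAMILY_FOREIGN_EXT are rewritten to use
         // VK_QUEUE_FAMILY_EXTERNAL before reaching the host driver, which may not
         // support the foreign queue family.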
3194     inline void convertQueueFamilyForeignToExternal(uint32_t* queueFamilyIndexPtr) {
3195         if (*queueFamilyIndexPtr == VK_QUEUE_FAMILY_FOREIGN_EXT) {
3196             *queueFamilyIndexPtr = VK_QUEUE_FAMILY_EXTERNAL;
3197         }
3198     }
3199 
3200     inline void convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
3201         VkBufferMemoryBarrier* barrier) {
3202         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
3203         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
3204     }
3205 
3206     inline void convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
3207         VkImageMemoryBarrier* barrier) {
3208         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
3209         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
3210     }
3211 
3212     void on_vkCmdPipelineBarrier(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
3213                                  VkPipelineStageFlags srcStageMask,
3214                                  VkPipelineStageFlags dstStageMask,
3215                                  VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
3216                                  const VkMemoryBarrier* pMemoryBarriers,
3217                                  uint32_t bufferMemoryBarrierCount,
3218                                  const VkBufferMemoryBarrier* pBufferMemoryBarriers,
3219                                  uint32_t imageMemoryBarrierCount,
3220                                  const VkImageMemoryBarrier* pImageMemoryBarriers) {
3221         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3222         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3223 
3224         for (uint32_t i = 0; i < bufferMemoryBarrierCount; ++i) {
3225             convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
3226                 ((VkBufferMemoryBarrier*)pBufferMemoryBarriers) + i);
3227         }
3228 
3229         for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
3230             convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
3231                 ((VkImageMemoryBarrier*)pImageMemoryBarriers) + i);
3232         }
3233 
3234         if (imageMemoryBarrierCount == 0) {
3235             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
3236                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
3237                                      pBufferMemoryBarriers, imageMemoryBarrierCount,
3238                                      pImageMemoryBarriers);
3239             return;
3240         }
3241         std::lock_guard<std::recursive_mutex> lock(mLock);
3242         CommandBufferInfo* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3243         if (!cmdBufferInfo) return;
3244 
3245         DeviceInfo* deviceInfo = android::base::find(mDeviceInfo, cmdBufferInfo->device);
3246         if (!deviceInfo) return;
3247 
3248         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
3249             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
3250                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
3251                                      pBufferMemoryBarriers, imageMemoryBarrierCount,
3252                                      pImageMemoryBarriers);
3253             return;
3254         }
3255 
3256         // This is a compressed image. Handle decompression before calling vkCmdPipelineBarrier
3257 
3258         std::vector<VkImageMemoryBarrier> imageBarriers;
3259         bool needRebind = false;
3260 
3261         for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
3262             const VkImageMemoryBarrier& srcBarrier = pImageMemoryBarriers[i];
3263             auto* imageInfo = android::base::find(mImageInfo, srcBarrier.image);
3264 
3265             // If the image doesn't need GPU decompression, nothing to do.
3266             if (!imageInfo || !deviceInfo->needGpuDecompression(imageInfo->cmpInfo)) {
3267                 imageBarriers.push_back(srcBarrier);
3268                 continue;
3269             }
3270 
3271             // Otherwise, decompress the image, if we're going to read from it.
3272             needRebind |= imageInfo->cmpInfo.decompressIfNeeded(
3273                 vk, commandBuffer, srcStageMask, dstStageMask, srcBarrier, imageBarriers);
3274         }
3275 
3276         if (needRebind && cmdBufferInfo->computePipeline) {
3277             // Recover pipeline bindings
3278             // TODO(gregschlom): instead of doing this here again and again after each image we
3279             // decompress, could we do it once before calling vkCmdDispatch?
3280             vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
3281                                   cmdBufferInfo->computePipeline);
3282             if (!cmdBufferInfo->descriptorSets.empty()) {
3283                 vk->vkCmdBindDescriptorSets(
3284                     commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, cmdBufferInfo->descriptorLayout,
3285                     cmdBufferInfo->firstSet, cmdBufferInfo->descriptorSets.size(),
3286                     cmdBufferInfo->descriptorSets.data(), cmdBufferInfo->dynamicOffsets.size(),
3287                     cmdBufferInfo->dynamicOffsets.data());
3288             }
3289         }
3290 
3291         // Apply the remaining barriers
3292         if (memoryBarrierCount || bufferMemoryBarrierCount || !imageBarriers.empty()) {
3293             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
3294                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
3295                                      pBufferMemoryBarriers, imageBarriers.size(),
3296                                      imageBarriers.data());
3297         }
3298     }
3299 
3300     bool mapHostVisibleMemoryToGuestPhysicalAddressLocked(VulkanDispatch* vk, VkDevice device,
3301                                                           VkDeviceMemory memory,
3302                                                           uint64_t physAddr) {
3303         if (!feature_is_enabled(kFeature_GLDirectMem) &&
3304             !feature_is_enabled(kFeature_VirtioGpuNext)) {
3305             // fprintf(stderr, "%s: Tried to use direct mapping "
3306             // "while GLDirectMem is not enabled!\n");
3307         }
3308 
3309         auto* info = android::base::find(mMemoryInfo, memory);
3310         if (!info) return false;
3311 
3312         info->guestPhysAddr = physAddr;
3313 
3314         constexpr size_t kPageBits = 12;
3315         constexpr size_t kPageSize = 1u << kPageBits;
3316         constexpr size_t kPageOffsetMask = kPageSize - 1;
3317 
3318         uintptr_t addr = reinterpret_cast<uintptr_t>(info->ptr);
3319         uintptr_t pageOffset = addr & kPageOffsetMask;
3320 
3321         info->pageAlignedHva = reinterpret_cast<void*>(addr - pageOffset);
3322         info->sizeToPage = ((info->size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;
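             // Illustrative arithmetic with the 4 KiB pages defined above: ptr = 0x...5678
             // and size = 0x3000 give pageOffset = 0x678, pageAlignedHva = 0x...5000 and
             // sizeToPage = ((0x3000 + 0x678 + 0xfff) >> 12) << 12 = 0x4000, i.e. the
             // mapping is widened to whole pages covering [ptr, ptr + size).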
3323 
3324         if (mLogging) {
3325             fprintf(stderr, "%s: map: %p, %p -> [0x%llx 0x%llx]\n", __func__, info->ptr,
3326                     info->pageAlignedHva, (unsigned long long)info->guestPhysAddr,
3327                     (unsigned long long)info->guestPhysAddr + info->sizeToPage);
3328         }
3329 
3330         info->directMapped = true;
3331         uint64_t gpa = info->guestPhysAddr;
3332         void* hva = info->pageAlignedHva;
3333         size_t sizeToPage = info->sizeToPage;
3334 
3335         AutoLock occupiedGpasLock(mOccupiedGpasLock);
3336 
3337         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
3338         if (existingMemoryInfo) {
3339             fprintf(stderr, "%s: WARNING: already mapped gpa 0x%llx, replacing", __func__,
3340                     (unsigned long long)gpa);
3341 
3342             get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
3343                                                          existingMemoryInfo->sizeToPage);
3344 
3345             mOccupiedGpas.erase(gpa);
3346         }
3347 
3348         get_emugl_vm_operations().mapUserBackedRam(gpa, hva, sizeToPage);
3349 
3350         if (mVerbosePrints) {
3351             fprintf(stderr, "VERBOSE:%s: registering gpa 0x%llx to mOccupiedGpas\n", __func__,
3352                     (unsigned long long)gpa);
3353         }
3354 
3355         mOccupiedGpas[gpa] = {
3356             vk, device, memory, gpa, sizeToPage,
3357         };
3358 
3359         if (!mUseOldMemoryCleanupPath) {
3360             get_emugl_address_space_device_control_ops().register_deallocation_callback(
3361                 this, gpa, [](void* thisPtr, uint64_t gpa) {
3362                     Impl* implPtr = (Impl*)thisPtr;
3363                     implPtr->unmapMemoryAtGpaIfExists(gpa);
3364                 });
3365         }
3366 
3367         return true;
3368     }
3369 
3370     // Only call this from the address space device deallocation operation's
3371     // context, or it's possible that the guest/host view of which gpa's are
3372     // occupied goes out of sync.
3373     void unmapMemoryAtGpaIfExists(uint64_t gpa) {
3374         AutoLock lock(mOccupiedGpasLock);
3375 
3376         if (mVerbosePrints) {
3377             fprintf(stderr, "VERBOSE:%s: deallocation callback for gpa 0x%llx\n", __func__,
3378                     (unsigned long long)gpa);
3379         }
3380 
3381         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
3382         if (!existingMemoryInfo) return;
3383 
3384         get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
3385                                                      existingMemoryInfo->sizeToPage);
3386 
3387         mOccupiedGpas.erase(gpa);
3388     }
3389 
3390     VkResult on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice boxed_device,
3391                                  const VkMemoryAllocateInfo* pAllocateInfo,
3392                                  const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {
3393         auto device = unbox_VkDevice(boxed_device);
3394         auto vk = dispatch_VkDevice(boxed_device);
3395         auto* tInfo = RenderThreadInfoVk::get();
3396 
3397         if (!pAllocateInfo) return VK_ERROR_INITIALIZATION_FAILED;
3398 
3399         VkMemoryAllocateInfo localAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
3400         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localAllocInfo);
3401 
3402         VkMemoryAllocateFlagsInfo allocFlagsInfo;
3403         VkMemoryOpaqueCaptureAddressAllocateInfo opaqueCaptureAddressAllocInfo;
3404 
3405         const VkMemoryAllocateFlagsInfo* allocFlagsInfoPtr =
3406             vk_find_struct<VkMemoryAllocateFlagsInfo>(pAllocateInfo);
3407         const VkMemoryOpaqueCaptureAddressAllocateInfo* opaqueCaptureAddressAllocInfoPtr =
3408             vk_find_struct<VkMemoryOpaqueCaptureAddressAllocateInfo>(pAllocateInfo);
3409 
3410         if (allocFlagsInfoPtr) {
3411             allocFlagsInfo = *allocFlagsInfoPtr;
3412             vk_append_struct(&structChainIter, &allocFlagsInfo);
3413         }
3414 
3415         if (opaqueCaptureAddressAllocInfoPtr) {
3416             opaqueCaptureAddressAllocInfo = *opaqueCaptureAddressAllocInfoPtr;
3417             vk_append_struct(&structChainIter, &opaqueCaptureAddressAllocInfo);
3418         }
3419 
3420         const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
3421             vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
3422         VkMemoryDedicatedAllocateInfo localDedicatedAllocInfo;
3423 
3424         if (dedicatedAllocInfoPtr) {
3425             localDedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
3426         }
3427         if (!usingDirectMapping()) {
3428             // We copy bytes 1 page at a time from the guest to the host
3429             // if we are not using direct mapping. This means we can end up
3430             // writing over memory we did not intend.
3431             // E.g. swiftshader just allocated with malloc, which can have
3432             // data stored between allocations.
3433         #ifdef PAGE_SIZE
3434             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(PAGE_SIZE);
3435             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(PAGE_SIZE - 1);
3436         #elif defined(_WIN32)
3437             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(4096);
3438             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(4095);
3439         #else
3440             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(getpagesize());
3441             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(getpagesize() - 1);
3442         #endif
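             // Illustrative arithmetic: with a 4 KiB page, an allocationSize of 0x1234
             // becomes (0x1234 + 0x1000) & ~0xFFF = 0x2000; note that an already
             // page-aligned size still grows by one extra page.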
3443         }
3444         // Note for AHardwareBuffers, the Vulkan spec states:
3445         //
3446         //     Android hardware buffers have intrinsic width, height, format, and usage
3447         //     properties, so Vulkan images bound to memory imported from an Android
3448         //     hardware buffer must use dedicated allocations
3449         //
3450         // so any allocation requests with a VkImportAndroidHardwareBufferInfoANDROID
3451         // will necessarily have a VkMemoryDedicatedAllocateInfo. However, the host
3452         // may or may not actually use a dedicated allocation during Buffer/ColorBuffer
3453         // setup. Below checks if the underlying Buffer/ColorBuffer backing memory was
3454         // originally created with a dedicated allocation.
3455         bool shouldUseDedicatedAllocInfo = dedicatedAllocInfoPtr != nullptr;
3456 
3457         const VkImportColorBufferGOOGLE* importCbInfoPtr =
3458             vk_find_struct<VkImportColorBufferGOOGLE>(pAllocateInfo);
3459         const VkImportBufferGOOGLE* importBufferInfoPtr =
3460             vk_find_struct<VkImportBufferGOOGLE>(pAllocateInfo);
3461 
3462         const VkCreateBlobGOOGLE* createBlobInfoPtr =
3463             vk_find_struct<VkCreateBlobGOOGLE>(pAllocateInfo);
3464 
3465 #ifdef _WIN32
3466         VkImportMemoryWin32HandleInfoKHR importInfo{
3467             VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
3468             0,
3469             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
3470             VK_EXT_MEMORY_HANDLE_INVALID,
3471             L"",
3472         };
3473 #elif defined(__QNX__)
3474         VkImportScreenBufferInfoQNX importInfo{
3475             VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX,
3476             0,
3477             VK_EXT_MEMORY_HANDLE_INVALID,
3478         };
3479 #else
3480         VkImportMemoryFdInfoKHR importInfo{
3481             VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
3482             0,
3483             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
3484             VK_EXT_MEMORY_HANDLE_INVALID,
3485         };
3486 #endif
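             // Only one of the import structs above is compiled in per platform; its
             // handle/fd/buffer field is filled in below when importing ColorBuffer,
             // Buffer, or guest-created blob memory, and the struct is then chained onto
             // localAllocInfo via vk_append_struct.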
3487 
3488         void* mappedPtr = nullptr;
3489         ManagedDescriptor externalMemoryHandle;
3490         if (importCbInfoPtr) {
3491             bool vulkanOnly = mGuestUsesAngle;
3492 
3493             bool colorBufferMemoryUsesDedicatedAlloc = false;
3494             if (!getColorBufferAllocationInfo(importCbInfoPtr->colorBuffer,
3495                                               &localAllocInfo.allocationSize,
3496                                               &localAllocInfo.memoryTypeIndex,
3497                                               &colorBufferMemoryUsesDedicatedAlloc, &mappedPtr)) {
3498                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3499                     << "Failed to get allocation info for ColorBuffer:"
3500                     << importCbInfoPtr->colorBuffer;
3501             }
3502 
3503             shouldUseDedicatedAllocInfo &= colorBufferMemoryUsesDedicatedAlloc;
3504 
3505             if (!vulkanOnly) {
3506                 auto fb = FrameBuffer::getFB();
3507                 if (fb) {
3508                     fb->invalidateColorBufferForVk(importCbInfoPtr->colorBuffer);
3509                 }
3510             }
3511 
3512             if (m_emu->instanceSupportsExternalMemoryCapabilities) {
3513                 VK_EXT_MEMORY_HANDLE cbExtMemoryHandle =
3514                     getColorBufferExtMemoryHandle(importCbInfoPtr->colorBuffer);
3515 
3516                 if (cbExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
3517                     fprintf(stderr,
3518                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
3519                             "colorBuffer 0x%x does not have Vulkan external memory backing\n",
3520                             __func__, importCbInfoPtr->colorBuffer);
3521                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3522                 }
3523 
3524 #if defined(__QNX__)
3525                 importInfo.buffer = cbExtMemoryHandle;
3526 #else
3527                 externalMemoryHandle = ManagedDescriptor(dupExternalMemory(cbExtMemoryHandle));
3528 
3529 #ifdef _WIN32
3530                 importInfo.handle = externalMemoryHandle.get().value_or(static_cast<HANDLE>(NULL));
3531 #else
3532                 importInfo.fd = externalMemoryHandle.get().value_or(-1);
3533 #endif
3534 #endif
3535                 vk_append_struct(&structChainIter, &importInfo);
3536             }
3537         }
3538 
3539         if (importBufferInfoPtr) {
3540             bool bufferMemoryUsesDedicatedAlloc = false;
3541             if (!getBufferAllocationInfo(
3542                     importBufferInfoPtr->buffer, &localAllocInfo.allocationSize,
3543                     &localAllocInfo.memoryTypeIndex, &bufferMemoryUsesDedicatedAlloc)) {
3544                 ERR("Failed to get Buffer:%d allocation info.", importBufferInfoPtr->buffer);
3545                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3546             }
3547 
3548             shouldUseDedicatedAllocInfo &= bufferMemoryUsesDedicatedAlloc;
3549 
3550             if (m_emu->instanceSupportsExternalMemoryCapabilities) {
3551                 VK_EXT_MEMORY_HANDLE bufferExtMemoryHandle =
3552                     getBufferExtMemoryHandle(importBufferInfoPtr->buffer);
3553 
3554                 if (bufferExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
3555                     fprintf(stderr,
3556                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
3557                             "buffer 0x%x does not have Vulkan external memory "
3558                             "backing\n",
3559                             __func__, importBufferInfoPtr->buffer);
3560                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3561                 }
3562 
3563 #if defined(__QNX__)
3564                 importInfo.buffer = bufferExtMemoryHandle;
3565 #else
3566                 bufferExtMemoryHandle = dupExternalMemory(bufferExtMemoryHandle);
3567 
3568 #ifdef _WIN32
3569                 importInfo.handle = bufferExtMemoryHandle;
3570 #else
3571                 importInfo.fd = bufferExtMemoryHandle;
3572 #endif
3573 #endif
3574                 vk_append_struct(&structChainIter, &importInfo);
3575             }
3576         }
3577 
3578         VkMemoryPropertyFlags memoryPropertyFlags;
3579         {
3580             std::lock_guard<std::recursive_mutex> lock(mLock);
3581 
3582             auto* physdev = android::base::find(mDeviceToPhysicalDevice, device);
3583             if (!physdev) {
3584                 // User app gave an invalid VkDevice, but we don't really want to crash here.
3585                 // We should allow invalid apps.
3586                 return VK_ERROR_DEVICE_LOST;
3587             }
3588 
3589             auto* physdevInfo = android::base::find(mPhysdevInfo, *physdev);
3590             if (!physdevInfo) {
3591                 // If this fails, we crash, as we assume that the memory properties map should have
3592                 // the info.
3593                 fprintf(stderr, "Error: Could not get memory properties for VkPhysicalDevice\n");
3594             }
3595 
3596             // If the memory was allocated with a type index that corresponds
3597             // to a memory type that is host visible, let's also map the entire
3598             // thing.
3599 
3600             // First, check validity of the user's type index.
3601             if (localAllocInfo.memoryTypeIndex >= physdevInfo->memoryProperties.memoryTypeCount) {
3602                 // Continue allowing invalid behavior.
3603                 return VK_ERROR_INCOMPATIBLE_DRIVER;
3604             }
3605             memoryPropertyFlags =
3606                 physdevInfo->memoryProperties.memoryTypes[localAllocInfo.memoryTypeIndex]
3607                     .propertyFlags;
3608         }
3609 
3610         if (shouldUseDedicatedAllocInfo) {
3611             vk_append_struct(&structChainIter, &localDedicatedAllocInfo);
3612         }
3613 
3614         VkExportMemoryAllocateInfo exportAllocate = {
3615             .sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
3616             .pNext = NULL,
3617         };
3618 
3619 #ifdef __unix__
3620         exportAllocate.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
3621 #endif
3622 
3623 #ifdef __linux__
3624         if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
3625             exportAllocate.handleTypes |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
3626         }
3627 #endif
3628 
3629 #ifdef _WIN32
3630         exportAllocate.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
3631 #endif
3632 
3633         bool hostVisible = memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3634         if (hostVisible && feature_is_enabled(kFeature_ExternalBlob)) {
3635             vk_append_struct(&structChainIter, &exportAllocate);
3636         }
3637 
3638         if (createBlobInfoPtr && createBlobInfoPtr->blobMem == STREAM_BLOB_MEM_GUEST &&
3639             (createBlobInfoPtr->blobFlags & STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE)) {
3640             DescriptorType rawDescriptor;
3641             auto descriptorInfoOpt =
3642                 BlobManager::get()->removeDescriptorInfo(tInfo->ctx_id, createBlobInfoPtr->blobId);
3643             if (descriptorInfoOpt) {
3644                 auto rawDescriptorOpt = (*descriptorInfoOpt).descriptor.release();
3645                 if (rawDescriptorOpt) {
3646                     rawDescriptor = *rawDescriptorOpt;
3647                 } else {
3648                     ERR("Failed vkAllocateMemory: missing raw descriptor.");
3649                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3650                 }
3651             } else {
3652                 ERR("Failed vkAllocateMemory: missing descriptor info.");
3653                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3654             }
3655 #if defined(__linux__)
3656             importInfo.fd = rawDescriptor;
3657 #endif
3658 
3659 #ifdef __linux__
3660             if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
3661                 importInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
3662             }
3663 #endif
3664             vk_append_struct(&structChainIter, &importInfo);
3665         }
3666 
3667         VkImportMemoryHostPointerInfoEXT importHostInfo;
3668         std::optional<SharedMemory> sharedMemory = std::nullopt;
3669 
3670         // TODO(b/261222354): Make sure the feature exists when initializing sVkEmulation.
3671         if (hostVisible && feature_is_enabled(kFeature_SystemBlob)) {
3672             // Ensure size is page-aligned.
3673             VkDeviceSize alignedSize = __ALIGN(localAllocInfo.allocationSize, kPageSizeforBlob);
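                 // __ALIGN rounds up to the next multiple of kPageSizeforBlob, e.g.
                 // __ALIGN(0x1234, 0x1000) == 0x2000, while already-aligned sizes are
                 // left unchanged.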
3674             if (alignedSize != localAllocInfo.allocationSize) {
3675                 ERR("Warning: Aligning allocation size from %llu to %llu",
3676                     static_cast<unsigned long long>(localAllocInfo.allocationSize),
3677                     static_cast<unsigned long long>(alignedSize));
3678             }
3679             localAllocInfo.allocationSize = alignedSize;
3680 
3681             static std::atomic<uint64_t> uniqueShmemId = 0;
3682             sharedMemory = SharedMemory("shared-memory-vk-" + std::to_string(uniqueShmemId++),
3683                                         localAllocInfo.allocationSize);
3684             int ret = sharedMemory->create(0600);
3685             if (ret) {
3686                 ERR("Failed to create system-blob host-visible memory, error: %d", ret);
3687                 return VK_ERROR_OUT_OF_HOST_MEMORY;
3688             }
3689             mappedPtr = sharedMemory->get();
3690             int mappedPtrAlignment = reinterpret_cast<uintptr_t>(mappedPtr) % kPageSizeforBlob;
3691             if (mappedPtrAlignment != 0) {
3692                 ERR("Warning: Mapped shared memory pointer is not aligned to page size, alignment "
3693                     "is: %d",
3694                     mappedPtrAlignment);
3695             }
3696             importHostInfo = {.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
3697                               .pNext = NULL,
3698                               .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
3699                               .pHostPointer = mappedPtr};
3700             localAllocInfo.pNext = &importHostInfo;
3701         }
3702 
3703         VkResult result = vk->vkAllocateMemory(device, &localAllocInfo, pAllocator, pMemory);
3704 
3705         if (result != VK_SUCCESS) {
3706             return result;
3707         }
3708 
3709 #ifdef _WIN32
3710         // Let ManagedDescriptor close the underlying HANDLE when going out of scope. From the
3711         // VkImportMemoryWin32HandleInfoKHR spec: Importing memory object payloads from Windows
3712         // handles does not transfer ownership of the handle to the Vulkan implementation. For
3713         // handle types defined as NT handles, the application must release handle ownership using
3714         // the CloseHandle system call when the handle is no longer needed. For handle types defined
3715         // as NT handles, the imported memory object holds a reference to its payload.
3716 #else
3717         // Tell ManagedDescriptor not to close the underlying fd, because the ownership has already
3718         // been transferred to the Vulkan implementation. From VkImportMemoryFdInfoKHR spec:
3719         // Importing memory from a file descriptor transfers ownership of the file descriptor from
3720         // the application to the Vulkan implementation. The application must not perform any
3721         // operations on the file descriptor after a successful import. The imported memory object
3722         // holds a reference to its payload.
3723         externalMemoryHandle.release();
3724 #endif
3725 
3726         std::lock_guard<std::recursive_mutex> lock(mLock);
3727 
3728         mMemoryInfo[*pMemory] = MemoryInfo();
3729         auto& memoryInfo = mMemoryInfo[*pMemory];
3730         memoryInfo.size = localAllocInfo.allocationSize;
3731         memoryInfo.device = device;
3732         memoryInfo.memoryIndex = localAllocInfo.memoryTypeIndex;
3733 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
3734         if (importCbInfoPtr && m_emu->instanceSupportsMoltenVK) {
3735             memoryInfo.mtlTexture = getColorBufferMTLTexture(importCbInfoPtr->colorBuffer);
3736         }
3737 #endif
3738 
3739         if (importCbInfoPtr && !mGuestUsesAngle) {
3740             memoryInfo.boundColorBuffer = importCbInfoPtr->colorBuffer;
3741         }
3742 
3743         if (!hostVisible) {
3744             *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
3745             return result;
3746         }
3747 
3748         if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
3749             memoryInfo.caching = MAP_CACHE_CACHED;
3750         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD) {
3751             memoryInfo.caching = MAP_CACHE_UNCACHED;
3752         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
3753             memoryInfo.caching = MAP_CACHE_WC;
3754         }
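             // The MAP_CACHE_* value chosen here travels with the memory through the
             // blob/hostmem paths (see vkGetBlobInternal below), so guest-side mappings
             // can be created with matching cacheability.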
3755 
3756         VkInstance* instance = deviceToInstanceLocked(device);
3757         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
3758         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3759         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
3760 
3761         // If gfxstream needs to be able to read from this memory, needToMap should be true.
3762         // When external blobs are off, we always want to map HOST_VISIBLE memory because we run
3763         // in the same process as the guest.
3764         // When external blobs are on, we want to map memory only if a workaround is using it in
3765         // the gfxstream process. This happens when ASTC CPU emulation is on.
3766         bool needToMap =
3767             (!feature_is_enabled(kFeature_ExternalBlob) ||
3768              (deviceInfo->useAstcCpuDecompression && deviceInfo->emulateTextureAstc)) &&
3769             !createBlobInfoPtr;
3770 
3771         // Some cases provide a mappedPtr, so we only map if we still don't have a pointer here.
3772         if (!mappedPtr && needToMap) {
3773             memoryInfo.needUnmap = true;
3774             VkResult mapResult =
3775                 vk->vkMapMemory(device, *pMemory, 0, memoryInfo.size, 0, &memoryInfo.ptr);
3776             if (mapResult != VK_SUCCESS) {
3777                 freeMemoryLocked(vk, device, *pMemory, pAllocator);
3778                 *pMemory = VK_NULL_HANDLE;
3779                 return VK_ERROR_OUT_OF_HOST_MEMORY;
3780             }
3781         } else {
3782             // Since we didn't call vkMapMemory, unmapping is not needed (don't own mappedPtr).
3783             memoryInfo.needUnmap = false;
3784             memoryInfo.ptr = mappedPtr;
3785 
3786             if (createBlobInfoPtr) {
3787                 memoryInfo.blobId = createBlobInfoPtr->blobId;
3788             }
3789 
3790             // Always assign the shared memory into memoryInfo. If it was used, then it will have
3791             // ownership transferred.
3792             memoryInfo.sharedMemory = std::exchange(sharedMemory, std::nullopt);
3793         }
3794 
3795         *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
3796 
3797         return result;
3798     }
3799 
3800     void freeMemoryLocked(VulkanDispatch* vk, VkDevice device, VkDeviceMemory memory,
3801                           const VkAllocationCallbacks* pAllocator) {
3802         auto* info = android::base::find(mMemoryInfo, memory);
3803         if (!info) return;  // Invalid usage.
3804 
3805 #ifdef __APPLE__
3806         if (info->mtlTexture) {
3807             CFRelease(info->mtlTexture);
3808             info->mtlTexture = nullptr;
3809         }
3810 #endif
3811 
3812         if (info->directMapped) {
3813             // if direct mapped, we leave it up to the guest address space driver
3814             // to control the unmapping of the kvm slot on the host side
3815             // in order to avoid situations where
3816             //
3817             // 1. we try to unmap here and deadlock
3818             //
3819             // 2. unmapping at the wrong time (possibility of a parallel call
3820             // to unmap vs. address space allocate and mapMemory leading to
3821             // mapping the same gpa twice)
3822             if (mUseOldMemoryCleanupPath) {
3823                 unmapMemoryAtGpaIfExists(info->guestPhysAddr);
3824             }
3825         }
3826 
3827         if (info->virtioGpuMapped) {
3828             if (mLogging) {
3829                 fprintf(stderr, "%s: unmap hostmem %p id 0x%llx\n", __func__, info->ptr,
3830                         (unsigned long long)info->hostmemId);
3831             }
3832         }
3833 
3834         if (info->needUnmap && info->ptr) {
3835             vk->vkUnmapMemory(device, memory);
3836         }
3837 
3838         vk->vkFreeMemory(device, memory, pAllocator);
3839 
3840         mMemoryInfo.erase(memory);
3841     }
3842 
3843     void on_vkFreeMemory(android::base::BumpPool* pool, VkDevice boxed_device,
3844                          VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
3845         auto device = unbox_VkDevice(boxed_device);
3846         auto vk = dispatch_VkDevice(boxed_device);
3847 
3848         if (!device || !vk) {
3849             return;
3850         }
3851 
3852         std::lock_guard<std::recursive_mutex> lock(mLock);
3853 
3854         freeMemoryLocked(vk, device, memory, pAllocator);
3855     }
3856 
3857     VkResult on_vkMapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory memory,
3858                             VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
3859                             void** ppData) {
3860         std::lock_guard<std::recursive_mutex> lock(mLock);
3861         return on_vkMapMemoryLocked(0, memory, offset, size, flags, ppData);
3862     }
3863     VkResult on_vkMapMemoryLocked(VkDevice, VkDeviceMemory memory, VkDeviceSize offset,
3864                                   VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {
3865         auto* info = android::base::find(mMemoryInfo, memory);
3866         if (!info || !info->ptr) return VK_ERROR_MEMORY_MAP_FAILED;  // Invalid usage.
3867 
3868         *ppData = (void*)((uint8_t*)info->ptr + offset);
3869         return VK_SUCCESS;
3870     }
3871 
3872     void on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory) {
3873         // no-op; user-level mapping does not correspond
3874         // to any operation here.
3875     }
3876 
3877     uint8_t* getMappedHostPointer(VkDeviceMemory memory) {
3878         std::lock_guard<std::recursive_mutex> lock(mLock);
3879 
3880         auto* info = android::base::find(mMemoryInfo, memory);
3881         if (!info) return nullptr;
3882 
3883         return (uint8_t*)(info->ptr);
3884     }
3885 
3886     VkDeviceSize getDeviceMemorySize(VkDeviceMemory memory) {
3887         std::lock_guard<std::recursive_mutex> lock(mLock);
3888 
3889         auto* info = android::base::find(mMemoryInfo, memory);
3890         if (!info) return 0;
3891 
3892         return info->size;
3893     }
3894 
3895     bool usingDirectMapping() const {
3896         return feature_is_enabled(kFeature_GLDirectMem) ||
3897                feature_is_enabled(kFeature_VirtioGpuNext);
3898     }
3899 
3900     HostFeatureSupport getHostFeatureSupport() const {
3901         HostFeatureSupport res;
3902 
3903         if (!m_vk) return res;
3904 
3905         auto emu = getGlobalVkEmulation();
3906 
3907         res.supportsVulkan = emu && emu->live;
3908 
3909         if (!res.supportsVulkan) return res;
3910 
3911         const auto& props = emu->deviceInfo.physdevProps;
3912 
3913         res.supportsVulkan1_1 = props.apiVersion >= VK_API_VERSION_1_1;
3914         res.useDeferredCommands = emu->useDeferredCommands;
3915         res.useCreateResourcesWithRequirements = emu->useCreateResourcesWithRequirements;
3916 
3917         res.apiVersion = props.apiVersion;
3918         res.driverVersion = props.driverVersion;
3919         res.deviceID = props.deviceID;
3920         res.vendorID = props.vendorID;
3921         return res;
3922     }
3923 
3924     bool hasInstanceExtension(VkInstance instance, const std::string& name) {
3925         auto* info = android::base::find(mInstanceInfo, instance);
3926         if (!info) return false;
3927 
3928         for (const auto& enabledName : info->enabledExtensionNames) {
3929             if (name == enabledName) return true;
3930         }
3931 
3932         return false;
3933     }
3934 
3935     bool hasDeviceExtension(VkDevice device, const std::string& name) {
3936         auto* info = android::base::find(mDeviceInfo, device);
3937         if (!info) return false;
3938 
3939         for (const auto& enabledName : info->enabledExtensionNames) {
3940             if (name == enabledName) return true;
3941         }
3942 
3943         return false;
3944     }
3945 
3946     // Returns whether a vector of VkExtensionProperties contains a particular extension
3947     bool hasDeviceExtension(const std::vector<VkExtensionProperties>& properties,
3948                             const char* name) {
3949         for (const auto& prop : properties) {
3950             if (strcmp(prop.extensionName, name) == 0) return true;
3951         }
3952         return false;
3953     }
3954 
3955     // Convenience function to call vkEnumerateDeviceExtensionProperties and get the results as an
3956     // std::vector
3957     VkResult enumerateDeviceExtensionProperties(VulkanDispatch* vk, VkPhysicalDevice physicalDevice,
3958                                                 const char* pLayerName,
3959                                                 std::vector<VkExtensionProperties>& properties) {
3960         uint32_t propertyCount = 0;
3961         VkResult result = vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
3962                                                                    &propertyCount, nullptr);
3963         if (result != VK_SUCCESS) return result;
3964 
3965         properties.resize(propertyCount);
3966         return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, &propertyCount,
3967                                                         properties.data());
3968     }
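         // Typical use (sketch): enumerate the extension properties once into a vector,
         // then probe it with the hasDeviceExtension(properties, name) overload above,
         // e.g. for an external-memory or dma-buf extension name.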
3969 
3970     // VK_ANDROID_native_buffer
3971     VkResult on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool, VkDevice,
3972                                                   VkFormat format, VkImageUsageFlags imageUsage,
3973                                                   int* grallocUsage) {
3974         getGralloc0Usage(format, imageUsage, grallocUsage);
3975         return VK_SUCCESS;
3976     }
3977 
3978     VkResult on_vkGetSwapchainGrallocUsage2ANDROID(
3979         android::base::BumpPool* pool, VkDevice, VkFormat format, VkImageUsageFlags imageUsage,
3980         VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
3981         uint64_t* grallocProducerUsage) {
3982         getGralloc1Usage(format, imageUsage, swapchainImageUsage, grallocConsumerUsage,
3983                          grallocProducerUsage);
3984         return VK_SUCCESS;
3985     }
3986 
3987     VkResult on_vkAcquireImageANDROID(android::base::BumpPool* pool, VkDevice boxed_device,
3988                                       VkImage image, int nativeFenceFd, VkSemaphore semaphore,
3989                                       VkFence fence) {
3990         auto device = unbox_VkDevice(boxed_device);
3991         auto vk = dispatch_VkDevice(boxed_device);
3992 
3993         std::lock_guard<std::recursive_mutex> lock(mLock);
3994 
3995         auto* imageInfo = android::base::find(mImageInfo, image);
3996         if (!imageInfo) {
3997             return VK_ERROR_INITIALIZATION_FAILED;
3998         }
3999 
4000         VkQueue defaultQueue;
4001         uint32_t defaultQueueFamilyIndex;
4002         Lock* defaultQueueLock;
4003         if (!getDefaultQueueForDeviceLocked(device, &defaultQueue, &defaultQueueFamilyIndex,
4004                                             &defaultQueueLock)) {
4005             fprintf(stderr, "%s: can't get the default queue\n", __func__);
4006             return VK_ERROR_INITIALIZATION_FAILED;
4007         }
4008 
4009         AndroidNativeBufferInfo* anbInfo = imageInfo->anbInfo.get();
4010 
4011         return setAndroidNativeImageSemaphoreSignaled(vk, device, defaultQueue,
4012                                                       defaultQueueFamilyIndex, defaultQueueLock,
4013                                                       semaphore, fence, anbInfo);
4014     }
4015 
4016     VkResult on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool* pool, VkQueue boxed_queue,
4017                                                  uint32_t waitSemaphoreCount,
4018                                                  const VkSemaphore* pWaitSemaphores, VkImage image,
4019                                                  int* pNativeFenceFd) {
4020         auto queue = unbox_VkQueue(boxed_queue);
4021         auto vk = dispatch_VkQueue(boxed_queue);
4022 
4023         std::lock_guard<std::recursive_mutex> lock(mLock);
4024 
4025         auto* queueInfo = android::base::find(mQueueInfo, queue);
4026         if (!queueInfo) return VK_ERROR_INITIALIZATION_FAILED;
4027 
4028         if (mRenderDocWithMultipleVkInstances) {
4029             VkPhysicalDevice vkPhysicalDevice = mDeviceToPhysicalDevice.at(queueInfo->device);
4030             VkInstance vkInstance = mPhysicalDeviceToInstance.at(vkPhysicalDevice);
4031             mRenderDocWithMultipleVkInstances->onFrameDelimiter(vkInstance);
4032         }
4033 
4034         auto* imageInfo = android::base::find(mImageInfo, image);
4035         auto anbInfo = imageInfo->anbInfo;
4036 
4037         if (anbInfo->useVulkanNativeImage) {
4038             // vkQueueSignalReleaseImageANDROID() is only called by the Android framework's
4039             // implementation of vkQueuePresentKHR(). The guest application is responsible for
4040             // transitioning the image layout of the image passed to vkQueuePresentKHR() to
4041             // VK_IMAGE_LAYOUT_PRESENT_SRC_KHR before the call. If the host is using native
4042             // Vulkan images where `image` is backed with the same memory as its ColorBuffer,
4043             // then we need to update the tracked layout for that ColorBuffer.
4044             setColorBufferCurrentLayout(anbInfo->colorBufferHandle,
4045                                         VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
4046         }
4047 
4048         return syncImageToColorBuffer(vk, queueInfo->queueFamilyIndex, queue, queueInfo->lock,
4049                                       waitSemaphoreCount, pWaitSemaphores, pNativeFenceFd, anbInfo);
4050     }
4051 
4052     VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
4053                                                   VkDevice boxed_device, VkDeviceMemory memory,
4054                                                   uint64_t* pAddress) {
4055         auto device = unbox_VkDevice(boxed_device);
4056         auto vk = dispatch_VkDevice(boxed_device);
4057 
4058         if (!feature_is_enabled(kFeature_GLDirectMem)) {
4059             fprintf(stderr,
4060                     "FATAL: Tried to use direct mapping "
4061                     "while GLDirectMem is not enabled!\n");
4062         }
4063 
4064         std::lock_guard<std::recursive_mutex> lock(mLock);
4065 
4066         if (mLogging) {
4067             fprintf(stderr, "%s: deviceMemory: 0x%llx pAddress: 0x%llx\n", __func__,
4068                     (unsigned long long)memory, (unsigned long long)(*pAddress));
4069         }
4070 
4071         if (!mapHostVisibleMemoryToGuestPhysicalAddressLocked(vk, device, memory, *pAddress)) {
4072             return VK_ERROR_OUT_OF_HOST_MEMORY;
4073         }
4074 
4075         auto* info = android::base::find(mMemoryInfo, memory);
4076         if (!info) return VK_ERROR_INITIALIZATION_FAILED;
4077 
4078         *pAddress = (uint64_t)(uintptr_t)info->ptr;
4079 
4080         return VK_SUCCESS;
4081     }
4082 
4083     VkResult vkGetBlobInternal(VkDevice boxed_device, VkDeviceMemory memory, uint64_t hostBlobId) {
4084         std::lock_guard<std::recursive_mutex> lock(mLock);
4085         auto* tInfo = RenderThreadInfoVk::get();
4086 
4087         auto* info = android::base::find(mMemoryInfo, memory);
4088         if (!info) return VK_ERROR_OUT_OF_HOST_MEMORY;
4089 
4090         hostBlobId = (info->blobId && !hostBlobId) ? info->blobId : hostBlobId;
4091 
4092         if (feature_is_enabled(kFeature_SystemBlob) && info->sharedMemory.has_value()) {
4093             uint32_t handleType = STREAM_MEM_HANDLE_TYPE_SHM;
4094             // We transfer ownership of the shared memory handle to the descriptor info.
4095             // The memory itself is destroyed only when all processes unmap / release their
4096             // handles.
4097             BlobManager::get()->addDescriptorInfo(tInfo->ctx_id, hostBlobId,
4098                                                   info->sharedMemory->releaseHandle(), handleType,
4099                                                   info->caching, std::nullopt);
4100         } else if (feature_is_enabled(kFeature_ExternalBlob)) {
4101             VkResult result;
4102             auto device = unbox_VkDevice(boxed_device);
4103             DescriptorType handle;
4104             uint32_t handleType;
4105             struct VulkanInfo vulkanInfo = {
4106                 .memoryIndex = info->memoryIndex,
4107             };
4108             memcpy(vulkanInfo.deviceUUID, m_emu->deviceInfo.idProps.deviceUUID,
4109                    sizeof(vulkanInfo.deviceUUID));
4110             memcpy(vulkanInfo.driverUUID, m_emu->deviceInfo.idProps.driverUUID,
4111                    sizeof(vulkanInfo.driverUUID));
4112 
4113 #ifdef __unix__
4114             VkMemoryGetFdInfoKHR getFd = {
4115                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
4116                 .pNext = nullptr,
4117                 .memory = memory,
4118                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
4119             };
4120 
4121             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_FD;
4122 #endif
4123 
4124 #ifdef __linux__
4125             if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
4126                 getFd.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
4127                 handleType = STREAM_MEM_HANDLE_TYPE_DMABUF;
4128             }
4129 #endif
4130 
4131 #ifdef __unix__
4132             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getFd, &handle);
4133             if (result != VK_SUCCESS) {
4134                 return result;
4135             }
4136 #endif
4137 
4138 #ifdef _WIN32
4139             VkMemoryGetWin32HandleInfoKHR getHandle = {
4140                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
4141                 .pNext = nullptr,
4142                 .memory = memory,
4143                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
4144             };
4145 
4146             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_WIN32;
4147 
4148             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getHandle, &handle);
4149             if (result != VK_SUCCESS) {
4150                 return result;
4151             }
4152 #endif
4153 
4154             ManagedDescriptor managedHandle(handle);
4155             BlobManager::get()->addDescriptorInfo(
4156                 tInfo->ctx_id, hostBlobId, std::move(managedHandle), handleType, info->caching,
4157                 std::optional<VulkanInfo>(vulkanInfo));
4158         } else if (!info->needUnmap) {
4159             auto device = unbox_VkDevice(boxed_device);
4160             auto vk = dispatch_VkDevice(boxed_device);
4161 
4162             VkResult mapResult = vk->vkMapMemory(device, memory, 0, info->size, 0, &info->ptr);
4163             if (mapResult != VK_SUCCESS) {
4164                 return VK_ERROR_OUT_OF_HOST_MEMORY;
4165             }
4166 
4167             info->needUnmap = true;
4168         }
4169 
4170         if (info->needUnmap) {
4171             uint64_t hva = (uint64_t)(uintptr_t)(info->ptr);
4172             uint64_t size = (uint64_t)(uintptr_t)(info->size);
4173 
4174             uint64_t alignedHva = hva & kPageMaskForBlob;
4175             uint64_t alignedSize =
4176                 kPageSizeforBlob * ((size + kPageSizeforBlob - 1) / kPageSizeforBlob);
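                 // Illustrative arithmetic (assuming kPageSizeforBlob is 4 KiB and
                 // kPageMaskForBlob clears the low 12 bits): hva = 0x...5678 and
                 // size = 0x2300 give alignedHva = 0x...5000 and alignedSize = 0x3000.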
4177 
4178             if (hva != alignedHva) {
4179                 ERR("Mapping non page-size (0x%" PRIx64
4180                     ") aligned host virtual address: 0x%" PRIx64
4181                     " using the aligned host virtual address: 0x%" PRIx64 ". The underlying"
4182                     " resources using this blob may be corrupted/offset.",
4183                     kPageSizeforBlob, hva, alignedHva);
4184             }
4185 
4186             BlobManager::get()->addMapping(tInfo->ctx_id, hostBlobId, (void*)(uintptr_t)alignedHva,
4187                                            info->caching);
4188             info->virtioGpuMapped = true;
4189             info->hostmemId = hostBlobId;
4190         }
4191 
4192         return VK_SUCCESS;
4193     }
4194 
4195     VkResult on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
4196                                 VkDeviceMemory memory) {
4197         return vkGetBlobInternal(boxed_device, memory, 0);
4198     }
4199 
4200     VkResult on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool* pool,
4201                                                  VkDevice boxed_device, VkDeviceMemory memory,
4202                                                  uint64_t* pAddress, uint64_t* pSize,
4203                                                  uint64_t* pHostmemId) {
4204         hostBlobId++;
4205         *pHostmemId = hostBlobId;
4206         return vkGetBlobInternal(boxed_device, memory, hostBlobId);
4207     }
4208 
4209     VkResult on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
4210                                        VkDeviceMemory memory,
4211                                        const VkAllocationCallbacks* pAllocator) {
4212         on_vkFreeMemory(pool, boxed_device, memory, pAllocator);
4213 
4214         return VK_SUCCESS;
4215     }
4216 
4217     VkResult on_vkAllocateCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
4218                                          const VkCommandBufferAllocateInfo* pAllocateInfo,
4219                                          VkCommandBuffer* pCommandBuffers) {
4220         auto device = unbox_VkDevice(boxed_device);
4221         auto vk = dispatch_VkDevice(boxed_device);
4222 
4223         VkResult result = vk->vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
4224 
4225         if (result != VK_SUCCESS) {
4226             return result;
4227         }
4228 
4229         std::lock_guard<std::recursive_mutex> lock(mLock);
4230 
4231         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4232         if (!deviceInfo) return VK_ERROR_UNKNOWN;
4233 
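             // Each host VkCommandBuffer is tracked in mCmdBufferInfo and wrapped in a
             // boxed handle (carrying the dispatch table); the boxed handle is what the
             // guest receives back in pCommandBuffers.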
4234         for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
4235             mCmdBufferInfo[pCommandBuffers[i]] = CommandBufferInfo();
4236             mCmdBufferInfo[pCommandBuffers[i]].device = device;
4237             mCmdBufferInfo[pCommandBuffers[i]].debugUtilsHelper = deviceInfo->debugUtilsHelper;
4238             mCmdBufferInfo[pCommandBuffers[i]].cmdPool = pAllocateInfo->commandPool;
4239             auto boxed = new_boxed_VkCommandBuffer(pCommandBuffers[i], vk,
4240                                                    false /* does not own dispatch */);
4241             mCmdBufferInfo[pCommandBuffers[i]].boxed = boxed;
4242             pCommandBuffers[i] = (VkCommandBuffer)boxed;
4243         }
4244         return result;
4245     }
4246 
4247     VkResult on_vkCreateCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
4248                                     const VkCommandPoolCreateInfo* pCreateInfo,
4249                                     const VkAllocationCallbacks* pAllocator,
4250                                     VkCommandPool* pCommandPool) {
4251         auto device = unbox_VkDevice(boxed_device);
4252         auto vk = dispatch_VkDevice(boxed_device);
4253 
4254         VkResult result = vk->vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
4255         if (result != VK_SUCCESS) {
4256             return result;
4257         }
4258         std::lock_guard<std::recursive_mutex> lock(mLock);
4259         mCmdPoolInfo[*pCommandPool] = CommandPoolInfo();
4260         auto& cmdPoolInfo = mCmdPoolInfo[*pCommandPool];
4261         cmdPoolInfo.device = device;
4262 
4263         *pCommandPool = new_boxed_non_dispatchable_VkCommandPool(*pCommandPool);
4264         cmdPoolInfo.boxed = *pCommandPool;
4265 
4266         return result;
4267     }
4268 
4269     void on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
4270                                  VkCommandPool commandPool,
4271                                  const VkAllocationCallbacks* pAllocator) {
4272         auto device = unbox_VkDevice(boxed_device);
4273         auto vk = dispatch_VkDevice(boxed_device);
4274 
4275         vk->vkDestroyCommandPool(device, commandPool, pAllocator);
4276         std::lock_guard<std::recursive_mutex> lock(mLock);
4277         const auto* cmdPoolInfo = android::base::find(mCmdPoolInfo, commandPool);
4278         if (cmdPoolInfo) {
4279             removeCommandBufferInfo(cmdPoolInfo->cmdBuffers);
4280             mCmdPoolInfo.erase(commandPool);
4281         }
4282     }
4283 
4284     VkResult on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
4285                                    VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
4286         auto device = unbox_VkDevice(boxed_device);
4287         auto vk = dispatch_VkDevice(boxed_device);
4288 
4289         VkResult result = vk->vkResetCommandPool(device, commandPool, flags);
4290         if (result != VK_SUCCESS) {
4291             return result;
4292         }
4293         return result;
4294     }
4295 
4296     void on_vkCmdExecuteCommands(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
4297                                  uint32_t commandBufferCount,
4298                                  const VkCommandBuffer* pCommandBuffers) {
4299         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4300         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4301 
4302         vk->vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
4303         std::lock_guard<std::recursive_mutex> lock(mLock);
4304         CommandBufferInfo& cmdBuffer = mCmdBufferInfo[commandBuffer];
4305         cmdBuffer.subCmds.insert(cmdBuffer.subCmds.end(), pCommandBuffers,
4306                                  pCommandBuffers + commandBufferCount);
4307     }
4308 
4309     VkResult dispatchVkQueueSubmit(VulkanDispatch* vk, VkQueue unboxed_queue, uint32_t submitCount,
4310                                    const VkSubmitInfo* pSubmits, VkFence fence) {
4311         return vk->vkQueueSubmit(unboxed_queue, submitCount, pSubmits, fence);
4312     }
4313 
4314     VkResult dispatchVkQueueSubmit(VulkanDispatch* vk, VkQueue unboxed_queue, uint32_t submitCount,
4315                                    const VkSubmitInfo2* pSubmits, VkFence fence) {
4316         return vk->vkQueueSubmit2(unboxed_queue, submitCount, pSubmits, fence);
4317     }
4318 
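    // The two dispatchVkQueueSubmit() overloads above let the templated entry point
    // below route VkSubmitInfo through vkQueueSubmit and VkSubmitInfo2 through
    // vkQueueSubmit2, so the queue-lookup and locking logic is written only once.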
4319     template <typename VkSubmitInfoType>
4320     VkResult on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue boxed_queue,
4321                               uint32_t submitCount, const VkSubmitInfoType* pSubmits,
4322                               VkFence fence) {
4323         auto queue = unbox_VkQueue(boxed_queue);
4324         auto vk = dispatch_VkQueue(boxed_queue);
4325 
4326         Lock* ql;
4327         {
4328             std::lock_guard<std::recursive_mutex> lock(mLock);
4329 
4330             {
4331                 auto* queueInfo = android::base::find(mQueueInfo, queue);
4332                 if (queueInfo) {
4333                     sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(queueInfo->device);
4334                 }
4335             }
4336 
4337             for (uint32_t i = 0; i < submitCount; i++) {
4338                 executePreprocessRecursive(pSubmits[i]);
4339             }
4340 
4341             auto* queueInfo = android::base::find(mQueueInfo, queue);
4342             if (!queueInfo) return VK_SUCCESS;
4343             ql = queueInfo->lock;
4344         }
4345 
4346         AutoLock qlock(*ql);
4347         auto result = dispatchVkQueueSubmit(vk, queue, submitCount, pSubmits, fence);
4348 
4349         // After vkQueueSubmit is called, we can signal the condition variable
4350         // in FenceInfo, so that other threads (e.g. SyncThread) can call
4351         // waitForFence() on this fence.
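        //
        // Illustrative sketch (an assumption about the waiter, not code from this
        // file): a consumer such as SyncThread is expected to do roughly
        //
        //     waitForFence(fence, timeoutNs);  // first waits for state == kWaitable,
        //                                      // then waits on the driver fence
        //
        // so the signal below is what releases that first stage.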
4352         {
4353             std::lock_guard<std::recursive_mutex> lock(mLock);
4354             auto* fenceInfo = android::base::find(mFenceInfo, fence);
4355             if (fenceInfo) {
4356                 fenceInfo->state = FenceInfo::State::kWaitable;
4357                 fenceInfo->lock.lock();
4358                 fenceInfo->cv.signalAndUnlock(&fenceInfo->lock);
4359             }
4360         }
4361 
4362         return result;
4363     }
4364 
4365     VkResult on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue boxed_queue) {
4366         auto queue = unbox_VkQueue(boxed_queue);
4367         auto vk = dispatch_VkQueue(boxed_queue);
4368 
4369         if (!queue) return VK_SUCCESS;
4370 
4371         Lock* ql;
4372         {
4373             std::lock_guard<std::recursive_mutex> lock(mLock);
4374             auto* queueInfo = android::base::find(mQueueInfo, queue);
4375             if (!queueInfo) return VK_SUCCESS;
4376             ql = queueInfo->lock;
4377         }
4378 
4379         AutoLock qlock(*ql);
4380         return vk->vkQueueWaitIdle(queue);
4381     }
4382 
4383     VkResult on_vkResetCommandBuffer(android::base::BumpPool* pool,
4384                                      VkCommandBuffer boxed_commandBuffer,
4385                                      VkCommandBufferResetFlags flags) {
4386         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4387         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4388 
4389         VkResult result = vk->vkResetCommandBuffer(commandBuffer, flags);
4390         if (VK_SUCCESS == result) {
4391             std::lock_guard<std::recursive_mutex> lock(mLock);
4392             auto& bufferInfo = mCmdBufferInfo[commandBuffer];
4393             bufferInfo.reset();
4394         }
4395         return result;
4396     }
4397 
4398     void on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
4399                                  VkCommandPool commandPool, uint32_t commandBufferCount,
4400                                  const VkCommandBuffer* pCommandBuffers) {
4401         auto device = unbox_VkDevice(boxed_device);
4402         auto vk = dispatch_VkDevice(boxed_device);
4403 
4404         if (!device) return;
4405         vk->vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
4406         std::lock_guard<std::recursive_mutex> lock(mLock);
4407         for (uint32_t i = 0; i < commandBufferCount; i++) {
4408             const auto& cmdBufferInfoIt = mCmdBufferInfo.find(pCommandBuffers[i]);
4409             if (cmdBufferInfoIt != mCmdBufferInfo.end()) {
4410                 const auto& cmdPoolInfoIt = mCmdPoolInfo.find(cmdBufferInfoIt->second.cmdPool);
4411                 if (cmdPoolInfoIt != mCmdPoolInfo.end()) {
4412                     cmdPoolInfoIt->second.cmdBuffers.erase(pCommandBuffers[i]);
4413                 }
4414                 // Done in decoder
4415                 // delete_VkCommandBuffer(cmdBufferInfoIt->second.boxed);
4416                 mCmdBufferInfo.erase(cmdBufferInfoIt);
4417             }
4418         }
4419     }
4420 
4421     void on_vkGetPhysicalDeviceExternalSemaphoreProperties(
4422         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
4423         const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
4424         VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
4425         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
4426 
4427         if (!physicalDevice) {
4428             return;
4429         }
4430         // Cannot forward this call to the driver because the NVIDIA Linux driver crashes on it.
4431         switch (pExternalSemaphoreInfo->handleType) {
4432             case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
4433                 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
4434                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
4435                 pExternalSemaphoreProperties->compatibleHandleTypes =
4436                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
4437                 pExternalSemaphoreProperties->externalSemaphoreFeatures =
4438                     VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
4439                     VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
4440                 return;
4441             case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
4442                 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
4443                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
4444                 pExternalSemaphoreProperties->compatibleHandleTypes =
4445                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
4446                 pExternalSemaphoreProperties->externalSemaphoreFeatures =
4447                     VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
4448                     VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
4449                 return;
4450             default:
4451                 break;
4452         }
4453 
4454         pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
4455         pExternalSemaphoreProperties->compatibleHandleTypes = 0;
4456         pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
4457     }
4458 
4459     VkResult on_vkCreateDescriptorUpdateTemplate(
4460         android::base::BumpPool* pool, VkDevice boxed_device,
4461         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4462         const VkAllocationCallbacks* pAllocator,
4463         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
4464         auto device = unbox_VkDevice(boxed_device);
4465         auto vk = dispatch_VkDevice(boxed_device);
4466 
4467         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
4468 
4469         VkResult res =
4470             vk->vkCreateDescriptorUpdateTemplate(device, &descriptorUpdateTemplateInfo.createInfo,
4471                                                  pAllocator, pDescriptorUpdateTemplate);
4472 
4473         if (res == VK_SUCCESS) {
4474             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
4475                                              descriptorUpdateTemplateInfo);
4476             *pDescriptorUpdateTemplate =
4477                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
4478         }
4479 
4480         return res;
4481     }
4482 
4483     VkResult on_vkCreateDescriptorUpdateTemplateKHR(
4484         android::base::BumpPool* pool, VkDevice boxed_device,
4485         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4486         const VkAllocationCallbacks* pAllocator,
4487         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
4488         auto device = unbox_VkDevice(boxed_device);
4489         auto vk = dispatch_VkDevice(boxed_device);
4490 
4491         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
4492 
4493         VkResult res = vk->vkCreateDescriptorUpdateTemplateKHR(
4494             device, &descriptorUpdateTemplateInfo.createInfo, pAllocator,
4495             pDescriptorUpdateTemplate);
4496 
4497         if (res == VK_SUCCESS) {
4498             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
4499                                              descriptorUpdateTemplateInfo);
4500             *pDescriptorUpdateTemplate =
4501                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
4502         }
4503 
4504         return res;
4505     }
4506 
4507     void on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool* pool, VkDevice boxed_device,
4508                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4509                                               const VkAllocationCallbacks* pAllocator) {
4510         auto device = unbox_VkDevice(boxed_device);
4511         auto vk = dispatch_VkDevice(boxed_device);
4512 
4513         vk->vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
4514 
4515         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
4516     }
4517 
4518     void on_vkDestroyDescriptorUpdateTemplateKHR(
4519         android::base::BumpPool* pool, VkDevice boxed_device,
4520         VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4521         const VkAllocationCallbacks* pAllocator) {
4522         auto device = unbox_VkDevice(boxed_device);
4523         auto vk = dispatch_VkDevice(boxed_device);
4524 
4525         vk->vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
4526 
4527         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
4528     }
4529 
4530     void on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
4531         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
4532         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
4533         uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
4534         const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
4535         const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
4536         const VkBufferView* pBufferViews) {
4537         auto device = unbox_VkDevice(boxed_device);
4538         auto vk = dispatch_VkDevice(boxed_device);
4539 
4540         std::lock_guard<std::recursive_mutex> lock(mLock);
4541         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
4542         if (!info) return;
4543 
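        // info->data is the scratch buffer prepared by
        // calcLinearizedDescriptorUpdateTemplateInfo(): image infos, buffer infos and
        // buffer views live back to back at the recorded imageInfoStart /
        // bufferInfoStart / bufferViewStart offsets, so copying the raw arrays sent by
        // the guest is enough to rebuild the full template payload for the driver.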
4544         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
4545                imageInfoCount * sizeof(VkDescriptorImageInfo));
4546         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
4547                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
4548         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
4549                bufferViewCount * sizeof(VkBufferView));
4550 
4551         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
4552                                               info->data.data());
4553     }
4554 
4555     void on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
4556         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
4557         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
4558         uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
4559         const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
4560         const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
4561         const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
4562         const uint8_t* pInlineUniformBlockData) {
4563         auto device = unbox_VkDevice(boxed_device);
4564         auto vk = dispatch_VkDevice(boxed_device);
4565 
4566         std::lock_guard<std::recursive_mutex> lock(mLock);
4567         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
4568         if (!info) return;
4569 
4570         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
4571                imageInfoCount * sizeof(VkDescriptorImageInfo));
4572         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
4573                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
4574         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
4575                bufferViewCount * sizeof(VkBufferView));
4576         memcpy(info->data.data() + info->inlineUniformBlockStart, pInlineUniformBlockData,
4577                inlineUniformBlockCount);
4578 
4579         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
4580                                               info->data.data());
4581     }
4582 
4583     void hostSyncCommandBuffer(const char* tag, VkCommandBuffer boxed_commandBuffer,
4584                                uint32_t needHostSync, uint32_t sequenceNumber) {
4585         auto nextDeadline = []() {
4586             return android::base::getUnixTimeUs() + 10000;  // 10 ms
4587         };
4588 
4589         auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s
4590 
4591         OrderMaintenanceInfo* order = ordmaint_VkCommandBuffer(boxed_commandBuffer);
4592         if (!order) return;
4593 
4594         AutoLock lock(order->lock);
4595 
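        // The guest stamps host-synced packets with a monotonically increasing
        // sequence number. When host sync is requested, we only proceed once the
        // previously published number is exactly one behind ours, polling in 10 ms
        // slices with a 5 s overall timeout. Worked example (hypothetical values):
        // if order->sequenceNumber currently reads 7 and this call carries 9, we wait
        // here until some other thread publishes 8.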
4596         if (needHostSync) {
4597             while (
4598                 (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
4599                 auto waitUntilUs = nextDeadline();
4600                 order->cv.timedWait(&order->lock, waitUntilUs);
4601 
4602                 if (timeoutDeadline < android::base::getUnixTimeUs()) {
4603                     break;
4604                 }
4605             }
4606         }
4607 
4608         __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
4609         order->cv.signal();
4610         releaseOrderMaintInfo(order);
4611     }
4612 
4613     void on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
4614                                           VkCommandBuffer commandBuffer, uint32_t needHostSync,
4615                                           uint32_t sequenceNumber) {
4616         this->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
4617     }
4618 
4619     void hostSyncQueue(const char* tag, VkQueue boxed_queue, uint32_t needHostSync,
4620                        uint32_t sequenceNumber) {
4621         auto nextDeadline = []() {
4622             return android::base::getUnixTimeUs() + 10000;  // 10 ms
4623         };
4624 
4625         auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s
4626 
4627         OrderMaintenanceInfo* order = ordmaint_VkQueue(boxed_queue);
4628         if (!order) return;
4629 
4630         AutoLock lock(order->lock);
4631 
4632         if (needHostSync) {
4633             while (
4634                 (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
4635                 auto waitUntilUs = nextDeadline();
4636                 order->cv.timedWait(&order->lock, waitUntilUs);
4637 
4638                 if (timeoutDeadline < android::base::getUnixTimeUs()) {
4639                     break;
4640                 }
4641             }
4642         }
4643 
4644         __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
4645         order->cv.signal();
4646         releaseOrderMaintInfo(order);
4647     }
4648 
4649     void on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
4650                                   uint32_t needHostSync, uint32_t sequenceNumber) {
4651         this->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
4652     }
4653 
4654     VkResult on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool* pool,
4655                                                     VkDevice boxed_device,
4656                                                     const VkImageCreateInfo* pCreateInfo,
4657                                                     const VkAllocationCallbacks* pAllocator,
4658                                                     VkImage* pImage,
4659                                                     VkMemoryRequirements* pMemoryRequirements) {
4660         if (pMemoryRequirements) {
4661             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
4662         }
4663 
4664         VkResult imageCreateRes =
4665             on_vkCreateImage(pool, boxed_device, pCreateInfo, pAllocator, pImage);
4666 
4667         if (imageCreateRes != VK_SUCCESS) {
4668             return imageCreateRes;
4669         }
4670 
4671         on_vkGetImageMemoryRequirements(pool, boxed_device, unbox_VkImage(*pImage),
4672                                         pMemoryRequirements);
4673 
4674         return imageCreateRes;
4675     }
4676 
4677     VkResult on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool* pool,
4678                                                      VkDevice boxed_device,
4679                                                      const VkBufferCreateInfo* pCreateInfo,
4680                                                      const VkAllocationCallbacks* pAllocator,
4681                                                      VkBuffer* pBuffer,
4682                                                      VkMemoryRequirements* pMemoryRequirements) {
4683         if (pMemoryRequirements) {
4684             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
4685         }
4686 
4687         VkResult bufferCreateRes =
4688             on_vkCreateBuffer(pool, boxed_device, pCreateInfo, pAllocator, pBuffer);
4689 
4690         if (bufferCreateRes != VK_SUCCESS) {
4691             return bufferCreateRes;
4692         }
4693 
4694         auto device = unbox_VkDevice(boxed_device);
4695         auto vk = dispatch_VkDevice(boxed_device);
4696 
4697         vk->vkGetBufferMemoryRequirements(device, unbox_VkBuffer(*pBuffer), pMemoryRequirements);
4698 
4699         return bufferCreateRes;
4700     }
4701 
4702     VkResult on_vkBeginCommandBuffer(android::base::BumpPool* pool,
4703                                      VkCommandBuffer boxed_commandBuffer,
4704                                      const VkCommandBufferBeginInfo* pBeginInfo,
4705                                      const VkDecoderContext& context) {
4706         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4707         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4708         VkResult result = vk->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
4709 
4710         if (result != VK_SUCCESS) {
4711             return result;
4712         }
4713 
4714         std::lock_guard<std::recursive_mutex> lock(mLock);
4715 
4716         auto* commandBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
4717         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
4718         commandBufferInfo->reset();
4719 
4720         if (context.processName) {
4721             commandBufferInfo->debugUtilsHelper.cmdBeginDebugLabel(commandBuffer, "Process %s",
4722                                                                    context.processName);
4723         }
4724 
4725         return VK_SUCCESS;
4726     }
4727 
4728     VkResult on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
4729                                                 VkCommandBuffer boxed_commandBuffer,
4730                                                 const VkCommandBufferBeginInfo* pBeginInfo,
4731                                                 const VkDecoderContext& context) {
4732         return this->on_vkBeginCommandBuffer(pool, boxed_commandBuffer, pBeginInfo, context);
4733     }
4734 
4735     VkResult on_vkEndCommandBuffer(android::base::BumpPool* pool,
4736                                    VkCommandBuffer boxed_commandBuffer,
4737                                    const VkDecoderContext& context) {
4738         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4739         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4740 
4741         std::lock_guard<std::recursive_mutex> lock(mLock);
4742 
4743         auto* commandBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
4744         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
4745 
4746         if (context.processName) {
4747             commandBufferInfo->debugUtilsHelper.cmdEndDebugLabel(commandBuffer);
4748         }
4749 
4750         return vk->vkEndCommandBuffer(commandBuffer);
4751     }
4752 
4753     void on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
4754                                           VkCommandBuffer boxed_commandBuffer,
4755                                           const VkDecoderContext& context) {
4756         on_vkEndCommandBuffer(pool, boxed_commandBuffer, context);
4757     }
4758 
4759     void on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
4760                                             VkCommandBuffer boxed_commandBuffer,
4761                                             VkCommandBufferResetFlags flags) {
4762         on_vkResetCommandBuffer(pool, boxed_commandBuffer, flags);
4763     }
4764 
4765     void on_vkCmdBindPipeline(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
4766                               VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
4767         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4768         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4769         vk->vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
4770         if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
4771             std::lock_guard<std::recursive_mutex> lock(mLock);
4772             auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
4773             if (cmdBufferInfo) {
4774                 cmdBufferInfo->computePipeline = pipeline;
4775             }
4776         }
4777     }
4778 
4779     void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool,
4780                                     VkCommandBuffer boxed_commandBuffer,
4781                                     VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
4782                                     uint32_t firstSet, uint32_t descriptorSetCount,
4783                                     const VkDescriptorSet* pDescriptorSets,
4784                                     uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
4785         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4786         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4787         vk->vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet,
4788                                     descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
4789                                     pDynamicOffsets);
4790         if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
4791             std::lock_guard<std::recursive_mutex> lock(mLock);
4792             auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
4793             if (cmdBufferInfo) {
4794                 cmdBufferInfo->descriptorLayout = layout;
4795 
4796                 if (descriptorSetCount) {
4797                     cmdBufferInfo->firstSet = firstSet;
4798                     cmdBufferInfo->descriptorSets.assign(pDescriptorSets,
4799                                                          pDescriptorSets + descriptorSetCount);
4800                     cmdBufferInfo->dynamicOffsets.assign(pDynamicOffsets,
4801                                                          pDynamicOffsets + dynamicOffsetCount);
4802                 }
4803             }
4804         }
4805     }
4806 
4807     VkResult on_vkCreateRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
4808                                    const VkRenderPassCreateInfo* pCreateInfo,
4809                                    const VkAllocationCallbacks* pAllocator,
4810                                    VkRenderPass* pRenderPass) {
4811         auto device = unbox_VkDevice(boxed_device);
4812         auto vk = dispatch_VkDevice(boxed_device);
4813         VkRenderPassCreateInfo createInfo;
4814         bool needReformat = false;
4815         std::lock_guard<std::recursive_mutex> lock(mLock);
4816 
4817         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4818         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
4819         if (deviceInfo->emulateTextureEtc2 || deviceInfo->emulateTextureAstc) {
4820             for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
4821                 if (deviceInfo->needEmulatedDecompression(pCreateInfo->pAttachments[i].format)) {
4822                     needReformat = true;
4823                     break;
4824                 }
4825             }
4826         }
4827         std::vector<VkAttachmentDescription> attachments;
4828         if (needReformat) {
4829             createInfo = *pCreateInfo;
4830             attachments.assign(pCreateInfo->pAttachments,
4831                                pCreateInfo->pAttachments + pCreateInfo->attachmentCount);
4832             createInfo.pAttachments = attachments.data();
4833             for (auto& attachment : attachments) {
4834                 attachment.format = CompressedImageInfo::getOutputFormat(attachment.format);
4835             }
4836             pCreateInfo = &createInfo;
4837         }
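        // Example (assuming the usual emulated-decompression mapping in
        // CompressedImageInfo::getOutputFormat()): an attachment declared as
        // VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK would be rewritten to a decompressed
        // format such as VK_FORMAT_R8G8B8A8_UNORM, matching the emulated images that
        // get bound to this render pass.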
4838         VkResult res = vk->vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
4839         if (res != VK_SUCCESS) {
4840             return res;
4841         }
4842 
4843         auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
4844         renderPassInfo.device = device;
4845 
4846         *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);
4847 
4848         return res;
4849     }
4850 
4851     VkResult on_vkCreateRenderPass2(android::base::BumpPool* pool, VkDevice boxed_device,
4852                                     const VkRenderPassCreateInfo2* pCreateInfo,
4853                                     const VkAllocationCallbacks* pAllocator,
4854                                     VkRenderPass* pRenderPass) {
4855         auto device = unbox_VkDevice(boxed_device);
4856         auto vk = dispatch_VkDevice(boxed_device);
4857         std::lock_guard<std::recursive_mutex> lock(mLock);
4858 
4859         VkResult res = vk->vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
4860         if (res != VK_SUCCESS) {
4861             return res;
4862         }
4863 
4864         auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
4865         renderPassInfo.device = device;
4866 
4867         *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);
4868 
4869         return res;
4870     }
4871 
4872     void destroyRenderPassLocked(VkDevice device, VulkanDispatch* deviceDispatch,
4873                                  VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
4874         deviceDispatch->vkDestroyRenderPass(device, renderPass, pAllocator);
4875 
4876         mRenderPassInfo.erase(renderPass);
4877     }
4878 
4879     void on_vkDestroyRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
4880                                 VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
4881         auto device = unbox_VkDevice(boxed_device);
4882         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4883 
4884         std::lock_guard<std::recursive_mutex> lock(mLock);
4885         destroyRenderPassLocked(device, deviceDispatch, renderPass, pAllocator);
4886     }
4887 
4888     void on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
4889                                       VkCommandBuffer boxed_commandBuffer, VkQueryPool queryPool,
4890                                       uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4891                                       VkDeviceSize dstOffset, VkDeviceSize stride,
4892                                       VkQueryResultFlags flags) {
4893         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4894         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4895         if (queryCount == 1 && stride == 0) {
4896             // Some drivers don't seem to handle stride==0 very well.
4897             // In fact, the spec does not say what should happen with stride==0.
4898             // So we just use the largest stride possible.
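            // Worked example (hypothetical sizes): with a 1024-byte dstBuffer and
            // dstOffset == 256, the stride handed to the driver becomes 768 bytes.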
4899             stride = mBufferInfo[dstBuffer].size - dstOffset;
4900         }
4901         vk->vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
4902                                       dstOffset, stride, flags);
4903     }
4904 
4905     VkResult on_vkCreateFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
4906                                     const VkFramebufferCreateInfo* pCreateInfo,
4907                                     const VkAllocationCallbacks* pAllocator,
4908                                     VkFramebuffer* pFramebuffer) {
4909         auto device = unbox_VkDevice(boxed_device);
4910         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4911 
4912         VkResult result =
4913             deviceDispatch->vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
4914         if (result != VK_SUCCESS) {
4915             return result;
4916         }
4917 
4918         std::lock_guard<std::recursive_mutex> lock(mLock);
4919 
4920         auto& framebufferInfo = mFramebufferInfo[*pFramebuffer];
4921         framebufferInfo.device = device;
4922 
4923         *pFramebuffer = new_boxed_non_dispatchable_VkFramebuffer(*pFramebuffer);
4924 
4925         return result;
4926     }
4927 
4928     void destroyFramebufferLocked(VkDevice device, VulkanDispatch* deviceDispatch,
4929                                   VkFramebuffer framebuffer,
4930                                   const VkAllocationCallbacks* pAllocator) {
4931         deviceDispatch->vkDestroyFramebuffer(device, framebuffer, pAllocator);
4932 
4933         mFramebufferInfo.erase(framebuffer);
4934     }
4935 
4936     void on_vkDestroyFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
4937                                  VkFramebuffer framebuffer,
4938                                  const VkAllocationCallbacks* pAllocator) {
4939         auto device = unbox_VkDevice(boxed_device);
4940         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4941 
4942         std::lock_guard<std::recursive_mutex> lock(mLock);
4943         destroyFramebufferLocked(device, deviceDispatch, framebuffer, pAllocator);
4944     }
4945 
4946     VkResult on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue boxed_queue,
4947                                   uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
4948                                   VkFence fence) {
4949         // If pBindInfo contains VkTimelineSemaphoreSubmitInfo, then it's
4950         // possible the host driver isn't equipped to deal with them yet.  To
4951         // work around this, send empty vkQueueSubmits before and after the
4952         // call to vkQueueBindSparse that contain the right values for
4953         // wait/signal semaphores and contains the user's
4954         // wait/signal semaphores and contain the user's
4955         // order* implied by the indices of pBindInfo.
4956 
4957         // TODO: Detect if we are running on a driver that supports timeline
4958         // semaphore signal/wait operations in vkQueueBindSparse
4959         const bool needTimelineSubmitInfoWorkaround = true;
4960         (void)needTimelineSubmitInfoWorkaround;
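        // Sketch of the workaround for one pBindInfo[i] that carries a
        // VkTimelineSemaphoreSubmitInfo (variable names here are illustrative only):
        //
        //   vkQueueSubmit(queue, 1, &waitOnlySubmit,   VK_NULL_HANDLE); // waits + timeline wait values
        //   vkQueueBindSparse(queue, 1, &bindOnlyInfo, VK_NULL_HANDLE); // bind ops, semaphores stripped
        //   vkQueueSubmit(queue, 1, &signalOnlySubmit, maybeFence);     // signals + timeline signal values
        //
        // with the caller's fence attached only to the very last submit.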
4961 
4962         bool hasTimelineSemaphoreSubmitInfo = false;
4963 
4964         for (uint32_t i = 0; i < bindInfoCount; ++i) {
4965             const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
4966                 vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
4967             if (tsSi) {
4968                 hasTimelineSemaphoreSubmitInfo = true;
4969             }
4970         }
4971 
4972         auto queue = unbox_VkQueue(boxed_queue);
4973         auto vk = dispatch_VkQueue(boxed_queue);
4974 
4975         if (!hasTimelineSemaphoreSubmitInfo) {
4976             (void)pool;
4977             return vk->vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
4978         } else {
4979             std::vector<VkPipelineStageFlags> waitDstStageMasks;
4980             VkTimelineSemaphoreSubmitInfoKHR currTsSi = {
4981                 VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, 0, 0, nullptr, 0, nullptr,
4982             };
4983 
4984             VkSubmitInfo currSi = {
4985                 VK_STRUCTURE_TYPE_SUBMIT_INFO,
4986                 &currTsSi,
4987                 0,
4988                 nullptr,
4989                 nullptr,
4990                 0,
4991                 nullptr,  // No commands
4992                 0,
4993                 nullptr,
4994             };
4995 
4996             VkBindSparseInfo currBi;
4997 
4998             VkResult res;
4999 
5000             for (uint32_t i = 0; i < bindInfoCount; ++i) {
5001                 const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
5002                     vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
5003                 if (!tsSi) {
5004                     res = vk->vkQueueBindSparse(queue, 1, pBindInfo + i, fence);
5005                     if (VK_SUCCESS != res) return res;
5006                     continue;
5007                 }
5008 
5009                 currTsSi.waitSemaphoreValueCount = tsSi->waitSemaphoreValueCount;
5010                 currTsSi.pWaitSemaphoreValues = tsSi->pWaitSemaphoreValues;
5011                 currTsSi.signalSemaphoreValueCount = 0;
5012                 currTsSi.pSignalSemaphoreValues = nullptr;
5013 
5014                 currSi.waitSemaphoreCount = pBindInfo[i].waitSemaphoreCount;
5015                 currSi.pWaitSemaphores = pBindInfo[i].pWaitSemaphores;
5016                 waitDstStageMasks.resize(pBindInfo[i].waitSemaphoreCount,
5017                                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
5018                 currSi.pWaitDstStageMask = waitDstStageMasks.data();
5019 
5020                 currSi.signalSemaphoreCount = 0;
5021                 currSi.pSignalSemaphores = nullptr;
5022 
5023                 res = vk->vkQueueSubmit(queue, 1, &currSi, nullptr);
5024                 if (VK_SUCCESS != res) return res;
5025 
5026                 currBi = pBindInfo[i];
5027 
5028                 vk_struct_chain_remove(tsSi, &currBi);
5029 
5030                 currBi.waitSemaphoreCount = 0;
5031                 currBi.pWaitSemaphores = nullptr;
5032                 currBi.signalSemaphoreCount = 0;
5033                 currBi.pSignalSemaphores = nullptr;
5034 
5035                 res = vk->vkQueueBindSparse(queue, 1, &currBi, nullptr);
5036                 if (VK_SUCCESS != res) return res;
5037 
5038                 currTsSi.waitSemaphoreValueCount = 0;
5039                 currTsSi.pWaitSemaphoreValues = nullptr;
5040                 currTsSi.signalSemaphoreValueCount = tsSi->signalSemaphoreValueCount;
5041                 currTsSi.pSignalSemaphoreValues = tsSi->pSignalSemaphoreValues;
5042 
5043                 currSi.waitSemaphoreCount = 0;
5044                 currSi.pWaitSemaphores = nullptr;
5045                 currSi.signalSemaphoreCount = pBindInfo[i].signalSemaphoreCount;
5046                 currSi.pSignalSemaphores = pBindInfo[i].pSignalSemaphores;
5047 
5048                 res =
5049                     vk->vkQueueSubmit(queue, 1, &currSi, i == bindInfoCount - 1 ? fence : nullptr);
5050                 if (VK_SUCCESS != res) return res;
5051             }
5052 
5053             return VK_SUCCESS;
5054         }
5055     }
5056 
5057     void on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
5058                                          VkFormat format, VkDeviceSize* pOffset,
5059                                          VkDeviceSize* pRowPitchAlignment) {
5060         if (mPerFormatLinearImageProperties.find(format) == mPerFormatLinearImageProperties.end()) {
5061             VkDeviceSize offset = 0u;
5062             VkDeviceSize rowPitchAlignment = UINT_MAX;
5063 
5064             for (uint32_t width = 64; width <= 256; width++) {
5065                 LinearImageCreateInfo linearImageCreateInfo = {
5066                     .extent =
5067                         {
5068                             .width = width,
5069                             .height = 64,
5070                             .depth = 1,
5071                         },
5072                     .format = format,
5073                     .usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
5074                 };
5075 
5076                 VkDeviceSize currOffset = 0u;
5077                 VkDeviceSize currRowPitchAlignment = UINT_MAX;
5078 
5079                 VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
5080                 on_vkGetLinearImageLayout2GOOGLE(pool, boxed_device, &defaultVkImageCreateInfo,
5081                                                  &currOffset, &currRowPitchAlignment);
5082 
5083                 offset = currOffset;
5084                 rowPitchAlignment = std::min(currRowPitchAlignment, rowPitchAlignment);
5085             }
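            // Worked example (hypothetical numbers): if the probed widths report row
            // pitches of 256, 320 and 384 bytes, the per-image alignments are 256, 64
            // and 128, so the cached rowPitchAlignment for this format becomes 64.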
5086             mPerFormatLinearImageProperties[format] = LinearImageProperties{
5087                 .offset = offset,
5088                 .rowPitchAlignment = rowPitchAlignment,
5089             };
5090         }
5091 
5092         if (pOffset) {
5093             *pOffset = mPerFormatLinearImageProperties[format].offset;
5094         }
5095         if (pRowPitchAlignment) {
5096             *pRowPitchAlignment = mPerFormatLinearImageProperties[format].rowPitchAlignment;
5097         }
5098     }
5099 
5100     void on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
5101                                           const VkImageCreateInfo* pCreateInfo,
5102                                           VkDeviceSize* pOffset, VkDeviceSize* pRowPitchAlignment) {
5103         LinearImageCreateInfo linearImageCreateInfo = {
5104             .extent = pCreateInfo->extent,
5105             .format = pCreateInfo->format,
5106             .usage = pCreateInfo->usage,
5107         };
5108         if (mLinearImageProperties.find(linearImageCreateInfo) == mLinearImageProperties.end()) {
5109             auto device = unbox_VkDevice(boxed_device);
5110             auto vk = dispatch_VkDevice(boxed_device);
5111 
5112             VkImageSubresource subresource = {
5113                 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
5114                 .mipLevel = 0,
5115                 .arrayLayer = 0,
5116             };
5117 
5118             VkImage image;
5119             VkSubresourceLayout subresourceLayout;
5120 
5121             VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
5122             VkResult result = vk->vkCreateImage(device, &defaultVkImageCreateInfo, nullptr, &image);
5123             if (result != VK_SUCCESS) {
5124                 fprintf(stderr, "vkCreateImage failed. size: (%u x %u) result: %d\n",
5125                         linearImageCreateInfo.extent.width, linearImageCreateInfo.extent.height,
5126                         result);
5127                 return;
5128             }
5129             vk->vkGetImageSubresourceLayout(device, image, &subresource, &subresourceLayout);
5130             vk->vkDestroyImage(device, image, nullptr);
5131 
5132             VkDeviceSize offset = subresourceLayout.offset;
5133             uint64_t rowPitch = subresourceLayout.rowPitch;
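            // rowPitch & (~rowPitch + 1) isolates the lowest set bit of rowPitch,
            // i.e. the largest power of two dividing it; e.g. a row pitch of 768
            // (0b1100000000) yields a reported alignment of 256.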
5134             VkDeviceSize rowPitchAlignment = rowPitch & (~rowPitch + 1);
5135 
5136             mLinearImageProperties[linearImageCreateInfo] = {
5137                 .offset = offset,
5138                 .rowPitchAlignment = rowPitchAlignment,
5139             };
5140         }
5141 
5142         if (pOffset != nullptr) {
5143             *pOffset = mLinearImageProperties[linearImageCreateInfo].offset;
5144         }
5145         if (pRowPitchAlignment != nullptr) {
5146             *pRowPitchAlignment = mLinearImageProperties[linearImageCreateInfo].rowPitchAlignment;
5147         }
5148     }
5149 
5150 #include "VkSubDecoder.cpp"
5151 
5152     void on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool, VkQueue queue,
5153                                        VkCommandBuffer boxed_commandBuffer, VkDeviceSize dataSize,
5154                                        const void* pData, const VkDecoderContext& context) {
5155         (void)queue;
5156 
5157         VkCommandBuffer commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5158         VulkanDispatch* vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5159         VulkanMemReadingStream* readStream = readstream_VkCommandBuffer(boxed_commandBuffer);
5160         subDecode(readStream, vk, boxed_commandBuffer, commandBuffer, dataSize, pData, context);
5161     }
5162 
5163     void on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(android::base::BumpPool* pool, VkQueue queue,
5164                                                     VkCommandBuffer commandBuffer,
5165                                                     VkDeviceMemory deviceMemory,
5166                                                     VkDeviceSize dataOffset, VkDeviceSize dataSize,
5167                                                     const VkDecoderContext& context) {
5168         // TODO : implement
5169     }
5170     VkDescriptorSet getOrAllocateDescriptorSetFromPoolAndId(VulkanDispatch* vk, VkDevice device,
5171                                                             VkDescriptorPool pool,
5172                                                             VkDescriptorSetLayout setLayout,
5173                                                             uint64_t poolId, uint32_t pendingAlloc,
5174                                                             bool* didAlloc) {
5175         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
5176         if (!poolInfo) {
5177             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5178                 << "descriptor pool " << pool << " not found ";
5179         }
5180 
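        // poolId names a handle slot the guest reserved ahead of time in the boxed
        // handle manager; its `underlying` host VkDescriptorSet stays empty until an
        // allocation is actually pending, which is what the branches below sort out.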
5181         DispatchableHandleInfo<uint64_t>* setHandleInfo = sBoxedHandleManager.get(poolId);
5182 
5183         if (setHandleInfo->underlying) {
5184             if (pendingAlloc) {
5185                 VkDescriptorSet allocedSet;
5186                 vk->vkFreeDescriptorSets(device, pool, 1,
5187                                          (VkDescriptorSet*)(&setHandleInfo->underlying));
5188                 VkDescriptorSetAllocateInfo dsAi = {
5189                     VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
5190                 };
5191                 vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
5192                 setHandleInfo->underlying = (uint64_t)allocedSet;
5193                 initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
5194                 *didAlloc = true;
5195                 return allocedSet;
5196             } else {
5197                 *didAlloc = false;
5198                 return (VkDescriptorSet)(setHandleInfo->underlying);
5199             }
5200         } else {
5201             if (pendingAlloc) {
5202                 VkDescriptorSet allocedSet;
5203                 VkDescriptorSetAllocateInfo dsAi = {
5204                     VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
5205                 };
5206                 vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
5207                 setHandleInfo->underlying = (uint64_t)allocedSet;
5208                 initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
5209                 *didAlloc = true;
5210                 return allocedSet;
5211             } else {
5212                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5213                     << "descriptor pool " << pool << " wanted to get set with id 0x" << std::hex
5214                     << poolId;
5215                 return nullptr;
5216             }
5217         }
5218     }
5219 
5220     void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
5221         android::base::BumpPool* pool, VkQueue boxed_queue, uint32_t descriptorPoolCount,
5222         const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
5223         const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
5224         const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
5225         const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
5226         const VkWriteDescriptorSet* pPendingDescriptorWrites) {
5227         std::lock_guard<std::recursive_mutex> lock(mLock);
5228 
5229         VkDevice device;
5230 
5231         auto queue = unbox_VkQueue(boxed_queue);
5232         auto vk = dispatch_VkQueue(boxed_queue);
5233 
5234         auto* queueInfo = android::base::find(mQueueInfo, queue);
5235         if (queueInfo) {
5236             device = queueInfo->device;
5237         } else {
5238             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5239                 << "queue " << queue << "(boxed: " << boxed_queue << ") with no device registered";
5240         }
5241 
5242         std::vector<VkDescriptorSet> setsToUpdate(descriptorSetCount, nullptr);
5243 
5244         bool didAlloc = false;
5245 
5246         for (uint32_t i = 0; i < descriptorSetCount; ++i) {
5247             uint64_t poolId = pDescriptorSetPoolIds[i];
5248             uint32_t whichPool = pDescriptorSetWhichPool[i];
5249             uint32_t pendingAlloc = pDescriptorSetPendingAllocation[i];
5250             bool didAllocThisTime;
5251             setsToUpdate[i] = getOrAllocateDescriptorSetFromPoolAndId(
5252                 vk, device, pDescriptorPools[whichPool], pDescriptorSetLayouts[i], poolId,
5253                 pendingAlloc, &didAllocThisTime);
5254 
5255             if (didAllocThisTime) didAlloc = true;
5256         }
5257 
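        // pDescriptorWriteStartingIndices[i] marks where set i's writes begin inside
        // pPendingDescriptorWrites. Worked example (hypothetical values): with
        // descriptorSetCount == 2, starting indices {0, 3} and
        // pendingDescriptorWriteCount == 5, set 0 owns writes [0, 3) and set 1 owns
        // writes [3, 5); when fresh sets were allocated, the loop below patches each
        // write's dstSet accordingly before calling the update path.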
5258         if (didAlloc) {
5259             std::vector<VkWriteDescriptorSet> writeDescriptorSetsForHostDriver(
5260                 pendingDescriptorWriteCount);
5261             memcpy(writeDescriptorSetsForHostDriver.data(), pPendingDescriptorWrites,
5262                    pendingDescriptorWriteCount * sizeof(VkWriteDescriptorSet));
5263 
5264             for (uint32_t i = 0; i < descriptorSetCount; ++i) {
5265                 uint32_t writeStartIndex = pDescriptorWriteStartingIndices[i];
5266                 uint32_t writeEndIndex;
5267                 if (i == descriptorSetCount - 1) {
5268                     writeEndIndex = pendingDescriptorWriteCount;
5269                 } else {
5270                     writeEndIndex = pDescriptorWriteStartingIndices[i + 1];
5271                 }
5272 
5273                 for (uint32_t j = writeStartIndex; j < writeEndIndex; ++j) {
5274                     writeDescriptorSetsForHostDriver[j].dstSet = setsToUpdate[i];
5275                 }
5276             }
5277             this->on_vkUpdateDescriptorSetsImpl(
5278                 pool, vk, device, (uint32_t)writeDescriptorSetsForHostDriver.size(),
5279                 writeDescriptorSetsForHostDriver.data(), 0, nullptr);
5280         } else {
5281             this->on_vkUpdateDescriptorSetsImpl(pool, vk, device, pendingDescriptorWriteCount,
5282                                                 pPendingDescriptorWrites, 0, nullptr);
5283         }
5284     }
5285 
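    // Reports the descriptor set pool IDs tracked for |descriptorPool|: writes the count to
    // |pPoolIdCount| and, when |pPoolIds| is non-null, copies the IDs into it, mirroring the
    // usual Vulkan count-then-query enumeration pattern.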
5286     void on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool, VkDevice device,
5287                                              VkDescriptorPool descriptorPool,
5288                                              uint32_t* pPoolIdCount, uint64_t* pPoolIds) {
5289         std::lock_guard<std::recursive_mutex> lock(mLock);
5290         auto& info = mDescriptorPoolInfo[descriptorPool];
5291         *pPoolIdCount = (uint32_t)info.poolIds.size();
5292 
5293         if (pPoolIds) {
5294             for (uint32_t i = 0; i < info.poolIds.size(); ++i) {
5295                 pPoolIds[i] = info.poolIds[i];
5296             }
5297         }
5298     }
5299 
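    // When Y'CbCr conversion is emulated and the host driver lacks samplerYcbcrConversion
    // support, a fixed fake boxed handle is returned instead of calling into the driver; the
    // matching destroy path below is a no-op in that same configuration.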
5300     VkResult on_vkCreateSamplerYcbcrConversion(
5301         android::base::BumpPool*, VkDevice boxed_device,
5302         const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
5303         const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {
5304         if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
5305             *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(
5306                 (VkSamplerYcbcrConversion)((uintptr_t)0xffff0000ull));
5307             return VK_SUCCESS;
5308         }
5309         auto device = unbox_VkDevice(boxed_device);
5310         auto vk = dispatch_VkDevice(boxed_device);
5311         VkResult res =
5312             vk->vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
5313         if (res != VK_SUCCESS) {
5314             return res;
5315         }
5316         *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(*pYcbcrConversion);
5317         return VK_SUCCESS;
5318     }
5319 
5320     void on_vkDestroySamplerYcbcrConversion(android::base::BumpPool* pool, VkDevice boxed_device,
5321                                             VkSamplerYcbcrConversion ycbcrConversion,
5322                                             const VkAllocationCallbacks* pAllocator) {
5323         if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
5324             return;
5325         }
5326         auto device = unbox_VkDevice(boxed_device);
5327         auto vk = dispatch_VkDevice(boxed_device);
5328         vk->vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
5329         return;
5330     }
5331 
5332     void on_DeviceLost() { GFXSTREAM_ABORT(FatalError(VK_ERROR_DEVICE_LOST)); }
5333 
5334     void DeviceLostHandler() {}
5335 
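    // Logs a MetricEventVulkanOutOfMemory event whenever a decoded call returns one of the
    // Vulkan out-of-memory result codes, tagging it with the opcode and, when known, the
    // requested allocation size.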
5336     void on_CheckOutOfMemory(VkResult result, uint32_t opCode, const VkDecoderContext& context,
5337                              std::optional<uint64_t> allocationSize = std::nullopt) {
5338         if (result == VK_ERROR_OUT_OF_HOST_MEMORY || result == VK_ERROR_OUT_OF_DEVICE_MEMORY ||
5339             result == VK_ERROR_OUT_OF_POOL_MEMORY) {
5340             context.metricsLogger->logMetricEvent(
5341                 MetricEventVulkanOutOfMemory{.vkResultCode = result,
5342                                              .opCode = std::make_optional(opCode),
5343                                              .allocationSize = allocationSize});
5344         }
5345     }
5346 
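    // Blocks until the fence has actually been submitted (FenceInfo state kWaitable), then
    // forwards to vkWaitForFences with the supplied timeout. A VK_NULL_HANDLE or unknown
    // fence returns VK_SUCCESS immediately, since the guest may pass a semaphore here.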
5347     VkResult waitForFence(VkFence boxed_fence, uint64_t timeout) {
5348         VkFence fence;
5349         VkDevice device;
5350         VulkanDispatch* vk;
5351         StaticLock* fenceLock;
5352         ConditionVariable* cv;
5353         {
5354             std::lock_guard<std::recursive_mutex> lock(mLock);
5355 
5356             fence = unbox_VkFence(boxed_fence);
5357             if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
5358                 // No fence, could be a semaphore.
5359                 // TODO: Async wait for semaphores
5360                 return VK_SUCCESS;
5361             }
5362 
5363             // The Vulkan spec requires fences passed to vkQueueSubmit to be *externally
5364             // synchronized*, i.e. we cannot submit to a queue while waiting for the
5365             // fence in another thread. Threads that call this function have to wait
5366             // until a vkQueueSubmit() using this fence has been issued before calling
5367             // vkWaitForFences(), so we use a condition variable and mutex for thread
5368             // synchronization.
5369             //
5370             // See:
5371             // https://www.khronos.org/registry/vulkan/specs/1.2/html/vkspec.html#fundamentals-threadingbehavior
5372             // https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/issues/519
5373 
5374             device = mFenceInfo[fence].device;
5375             vk = mFenceInfo[fence].vk;
5376             fenceLock = &mFenceInfo[fence].lock;
5377             cv = &mFenceInfo[fence].cv;
5378         }
5379 
5380         fenceLock->lock();
5381         cv->wait(fenceLock, [this, fence] {
5382             std::lock_guard<std::recursive_mutex> lock(mLock);
5383             if (mFenceInfo[fence].state == FenceInfo::State::kWaitable) {
5384                 mFenceInfo[fence].state = FenceInfo::State::kWaiting;
5385                 return true;
5386             }
5387             return false;
5388         });
5389         fenceLock->unlock();
5390 
5391         {
5392             std::lock_guard<std::recursive_mutex> lock(mLock);
5393             if (mFenceInfo.find(fence) == mFenceInfo.end()) {
5394                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5395                     << "Fence was destroyed before vkWaitForFences call.";
5396             }
5397         }
5398 
5399         return vk->vkWaitForFences(device, /* fenceCount */ 1u, &fence,
5400                                    /* waitAll */ false, timeout);
5401     }
5402 
5403     VkResult getFenceStatus(VkFence boxed_fence) {
5404         VkDevice device;
5405         VkFence fence;
5406         VulkanDispatch* vk;
5407         {
5408             std::lock_guard<std::recursive_mutex> lock(mLock);
5409 
5410             fence = unbox_VkFence(boxed_fence);
5411             if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
5412                 // No fence, could be a semaphore.
5413                 // TODO: Async get status for semaphores
5414                 return VK_SUCCESS;
5415             }
5416 
5417             device = mFenceInfo[fence].device;
5418             vk = mFenceInfo[fence].vk;
5419         }
5420 
5421         return vk->vkGetFenceStatus(device, fence);
5422     }
5423 
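    // Registers |callback| on the image's VkQsriTimeline via registerCallbackForNextPresentAndPoll.
    // Returns FAIL_AND_CALLBACK_NOT_SCHEDULED when the image has no usable
    // AndroidNativeBufferInfo, so callers know the callback will never run.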
5424     AsyncResult registerQsriCallback(VkImage boxed_image, VkQsriTimeline::Callback callback) {
5425         VkImage image;
5426         std::shared_ptr<AndroidNativeBufferInfo> anbInfo;
5427         {
5428             std::lock_guard<std::recursive_mutex> lock(mLock);
5429 
5430             image = unbox_VkImage(boxed_image);
5431 
5432             if (mLogging) {
5433                 fprintf(stderr, "%s: for boxed image 0x%llx image %p\n", __func__,
5434                         (unsigned long long)boxed_image, image);
5435             }
5436 
5437             if (image == VK_NULL_HANDLE || mImageInfo.find(image) == mImageInfo.end()) {
5438                 // No image
5439                 return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
5440             }
5441 
5442             anbInfo = mImageInfo[image].anbInfo;  // shared ptr, take ref
5443         }
5444 
5445         if (!anbInfo) {
5446             fprintf(stderr, "%s: warning: image %p doesn't have anb info\n", __func__, image);
5447             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
5448         }
5449         if (!anbInfo->vk) {
5450             fprintf(stderr, "%s:%p warning: image %p anb info not initialized\n", __func__,
5451                     anbInfo.get(), image);
5452             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
5453         }
5454         // Could be null or mismatched image, check later
5455         if (image != anbInfo->image) {
5456             fprintf(stderr, "%s:%p warning: image %p anb info has wrong image: %p\n", __func__,
5457                     anbInfo.get(), image, anbInfo->image);
5458             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
5459         }
5460 
5461         anbInfo->qsriTimeline->registerCallbackForNextPresentAndPoll(std::move(callback));
5462 
5463         if (mLogging) {
5464             fprintf(stderr, "%s:%p Done registering\n", __func__, anbInfo.get());
5465         }
5466         return AsyncResult::OK_AND_CALLBACK_SCHEDULED;
5467     }
5468 
5469 #define GUEST_EXTERNAL_MEMORY_HANDLE_TYPES                                \
5470     (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID | \
5471      VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
5472 
5473     // Transforms
5474     // If adding a new transform here, please check if it needs to be used in VkDecoderTestDispatch
5475 
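    // The *_tohost transforms rewrite guest-visible external memory handle types/properties
    // into ones the host driver understands; the *_fromhost transforms restrict results to
    // GUEST_EXTERNAL_MEMORY_HANDLE_TYPES before they are returned to the guest.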
5476     void transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties* props,
5477                                                          uint32_t count) {
5478         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
5479         for (uint32_t i = 0; i < count; ++i) {
5480             mut[i] = transformExternalMemoryProperties_tohost(mut[i]);
5481         }
5482     }
5483     void transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties* props,
5484                                                            uint32_t count) {
5485         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
5486         for (uint32_t i = 0; i < count; ++i) {
5487             mut[i] = transformExternalMemoryProperties_fromhost(mut[i],
5488                                                                 GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);
5489         }
5490     }
5491 
5492     void transformImpl_VkImageCreateInfo_tohost(const VkImageCreateInfo* pImageCreateInfos,
5493                                                 uint32_t count) {
5494         for (uint32_t i = 0; i < count; i++) {
5495             VkImageCreateInfo& imageCreateInfo =
5496                 const_cast<VkImageCreateInfo&>(pImageCreateInfos[i]);
5497             const VkExternalMemoryImageCreateInfo* pExternalMemoryImageCi =
5498                 vk_find_struct<VkExternalMemoryImageCreateInfo>(&imageCreateInfo);
5499             bool importAndroidHardwareBuffer =
5500                 pExternalMemoryImageCi &&
5501                 (pExternalMemoryImageCi->handleTypes &
5502                  VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID);
5503             const VkNativeBufferANDROID* pNativeBufferANDROID =
5504                 vk_find_struct<VkNativeBufferANDROID>(&imageCreateInfo);
5505 
5506             // If the VkImage is going to bind to a ColorBuffer, we have to make sure the VkImage
5507             // that backs the ColorBuffer is created with identical parameters. From the spec: If
5508             // two aliases are both images that were created with identical creation parameters,
5509             // both were created with the VK_IMAGE_CREATE_ALIAS_BIT flag set, and both are bound
5510             // identically to memory except for VkBindImageMemoryDeviceGroupInfo::pDeviceIndices and
5511             // VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions, then they interpret the
5512             // contents of the memory in consistent ways, and data written to one alias can be read
5513             // by the other alias. ... Aliases created by binding the same memory to resources in
5514             // multiple Vulkan instances or external APIs using external memory handle export and
5515             // import mechanisms interpret the contents of the memory in consistent ways, and data
5516             // written to one alias can be read by the other alias. Otherwise, the aliases interpret
5517             // the contents of the memory differently, ...
5518             std::unique_ptr<VkImageCreateInfo> colorBufferVkImageCi = nullptr;
5519             std::string importSource;
5520             VkFormat resolvedFormat = VK_FORMAT_UNDEFINED;
5521             // Use UNORM formats for SRGB format requests.
5522             switch (imageCreateInfo.format) {
5523                 case VK_FORMAT_R8G8B8A8_SRGB:
5524                     resolvedFormat = VK_FORMAT_R8G8B8A8_UNORM;
5525                     break;
5526                 case VK_FORMAT_R8G8B8_SRGB:
5527                     resolvedFormat = VK_FORMAT_R8G8B8_UNORM;
5528                     break;
5529                 case VK_FORMAT_B8G8R8A8_SRGB:
5530                     resolvedFormat = VK_FORMAT_B8G8R8A8_UNORM;
5531                     break;
5532                 case VK_FORMAT_R8_SRGB:
5533                     resolvedFormat = VK_FORMAT_R8_UNORM;
5534                     break;
5535                 default:
5536                     resolvedFormat = imageCreateInfo.format;
5537             }
5538             if (importAndroidHardwareBuffer) {
5539                 // For AHardwareBufferImage binding, we can't know which ColorBuffer this
5540                 // to-be-created VkImage will bind to, so we try our best to infer the creation
5541                 // parameters.
5542                 colorBufferVkImageCi = generateColorBufferVkImageCreateInfo(
5543                     resolvedFormat, imageCreateInfo.extent.width, imageCreateInfo.extent.height,
5544                     imageCreateInfo.tiling);
5545                 importSource = "AHardwareBuffer";
5546             } else if (pNativeBufferANDROID) {
5547                 // For native buffer binding, we can query the creation parameters from handle.
5548                 uint32_t cbHandle = *static_cast<const uint32_t*>(pNativeBufferANDROID->handle);
5549                 auto colorBufferInfo = getColorBufferInfo(cbHandle);
5550                 if (colorBufferInfo.handle == cbHandle) {
5551                     colorBufferVkImageCi =
5552                         std::make_unique<VkImageCreateInfo>(colorBufferInfo.imageCreateInfoShallow);
5553                 } else {
5554                     ERR("Unknown ColorBuffer handle: %" PRIu32 ".", cbHandle);
5555                 }
5556                 importSource = "NativeBufferANDROID";
5557             }
5558             if (!colorBufferVkImageCi) {
5559                 continue;
5560             }
5561             imageCreateInfo.format = resolvedFormat;
5562             if (imageCreateInfo.flags & (~colorBufferVkImageCi->flags)) {
5563                 ERR("The VkImageCreateInfo to import %s contains unsupported VkImageCreateFlags. "
5564                     "All supported VkImageCreateFlags are %s, the input VkImageCreateInfo requires "
5565                     "support for %s.",
5566                     importSource.c_str(),
5567                     string_VkImageCreateFlags(colorBufferVkImageCi->flags).c_str(),
5568                     string_VkImageCreateFlags(imageCreateInfo.flags).c_str());
5569             }
5570             imageCreateInfo.flags |= colorBufferVkImageCi->flags;
5571             if (imageCreateInfo.imageType != colorBufferVkImageCi->imageType) {
5572                 ERR("The VkImageCreateInfo to import %s has an unexpected VkImageType: %s, %s "
5573                     "expected.",
5574                     importSource.c_str(), string_VkImageType(imageCreateInfo.imageType),
5575                     string_VkImageType(colorBufferVkImageCi->imageType));
5576             }
5577             if (imageCreateInfo.extent.depth != colorBufferVkImageCi->extent.depth) {
5578                 ERR("The VkImageCreateInfo to import %s has an unexpected VkExtent::depth: %" PRIu32
5579                     ", %" PRIu32 " expected.",
5580                     importSource.c_str(), imageCreateInfo.extent.depth,
5581                     colorBufferVkImageCi->extent.depth);
5582             }
5583             if (imageCreateInfo.mipLevels != colorBufferVkImageCi->mipLevels) {
5584                 ERR("The VkImageCreateInfo to import %s has an unexpected mipLevels: %" PRIu32
5585                     ", %" PRIu32 " expected.",
5586                     importSource.c_str(), imageCreateInfo.mipLevels,
5587                     colorBufferVkImageCi->mipLevels);
5588             }
5589             if (imageCreateInfo.arrayLayers != colorBufferVkImageCi->arrayLayers) {
5590                 ERR("The VkImageCreateInfo to import %s has an unexpected arrayLayers: %" PRIu32
5591                     ", %" PRIu32 " expected.",
5592                     importSource.c_str(), imageCreateInfo.arrayLayers,
5593                     colorBufferVkImageCi->arrayLayers);
5594             }
5595             if (imageCreateInfo.samples != colorBufferVkImageCi->samples) {
5596                 ERR("The VkImageCreateInfo to import %s has an unexpected VkSampleCountFlagBits: "
5597                     "%s, %s expected.",
5598                     importSource.c_str(), string_VkSampleCountFlagBits(imageCreateInfo.samples),
5599                     string_VkSampleCountFlagBits(colorBufferVkImageCi->samples));
5600             }
5601             if (imageCreateInfo.usage & (~colorBufferVkImageCi->usage)) {
5602                 ERR("The VkImageCreateInfo to import %s contains unsupported VkImageUsageFlags. "
5603                     "All supported VkImageUsageFlags are %s, the input VkImageCreateInfo requires "
5604                     "support for %s.",
5605                     importSource.c_str(),
5606                     string_VkImageUsageFlags(colorBufferVkImageCi->usage).c_str(),
5607                     string_VkImageUsageFlags(imageCreateInfo.usage).c_str());
5608             }
5609             imageCreateInfo.usage |= colorBufferVkImageCi->usage;
5610             // For the AndroidHardwareBuffer binding case, VkImageCreateInfo::sharingMode isn't
5611             // filled in by generateColorBufferVkImageCreateInfo, and
5612             // VkImageCreateInfo::{format, extent::{width, height}, tiling} are guaranteed to match.
5613             if (importAndroidHardwareBuffer) {
5614                 continue;
5615             }
5616             if (resolvedFormat != colorBufferVkImageCi->format) {
5617                 ERR("The VkImageCreateInfo to import %s contains unexpected VkFormat: %s. %s "
5618                     "expected.",
5619                     importSource.c_str(), string_VkFormat(imageCreateInfo.format),
5620                     string_VkFormat(colorBufferVkImageCi->format));
5621             }
5622             if (imageCreateInfo.extent.width != colorBufferVkImageCi->extent.width) {
5623                 ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::width: "
5624                     "%" PRIu32 ". %" PRIu32 " expected.",
5625                     importSource.c_str(), imageCreateInfo.extent.width,
5626                     colorBufferVkImageCi->extent.width);
5627             }
5628             if (imageCreateInfo.extent.height != colorBufferVkImageCi->extent.height) {
5629                 ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::height: "
5630                     "%" PRIu32 ". %" PRIu32 " expected.",
5631                     importSource.c_str(), imageCreateInfo.extent.height,
5632                     colorBufferVkImageCi->extent.height);
5633             }
5634             if (imageCreateInfo.tiling != colorBufferVkImageCi->tiling) {
5635                 ERR("The VkImageCreateInfo to import %s contains unexpected VkImageTiling: %s. %s "
5636                     "expected.",
5637                     importSource.c_str(), string_VkImageTiling(imageCreateInfo.tiling),
5638                     string_VkImageTiling(colorBufferVkImageCi->tiling));
5639             }
5640             if (imageCreateInfo.sharingMode != colorBufferVkImageCi->sharingMode) {
5641                 ERR("The VkImageCreateInfo to import %s contains unexpected VkSharingMode: %s. %s "
5642                     "expected.",
5643                     importSource.c_str(), string_VkSharingMode(imageCreateInfo.sharingMode),
5644                     string_VkSharingMode(colorBufferVkImageCi->sharingMode));
5645             }
5646         }
5647     }
5648 
5649     void transformImpl_VkImageCreateInfo_fromhost(const VkImageCreateInfo*, uint32_t) {
5650         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Not yet implemented.";
5651     }
5652 
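    // These macros stamp out matching tohost/fromhost transforms for every Vulkan struct that
    // carries an external memory handle type field or a VkExternalMemoryProperties member, so
    // adding a new struct only needs one extra DEFINE_* line below.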
5653 #define DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(type, field)                                         \
5654     void transformImpl_##type##_tohost(const type* props, uint32_t count) {                        \
5655         type* mut = (type*)props;                                                                  \
5656         for (uint32_t i = 0; i < count; ++i) {                                                     \
5657             mut[i].field =                                                                         \
5658                 (VkExternalMemoryHandleTypeFlagBits)transformExternalMemoryHandleTypeFlags_tohost( \
5659                     mut[i].field);                                                                 \
5660         }                                                                                          \
5661     }                                                                                              \
5662     void transformImpl_##type##_fromhost(const type* props, uint32_t count) {                      \
5663         type* mut = (type*)props;                                                                  \
5664         for (uint32_t i = 0; i < count; ++i) {                                                     \
5665             mut[i].field = (VkExternalMemoryHandleTypeFlagBits)                                    \
5666                 transformExternalMemoryHandleTypeFlags_fromhost(                                   \
5667                     mut[i].field, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);                             \
5668         }                                                                                          \
5669     }
5670 
5671 #define DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(type)                                  \
5672     void transformImpl_##type##_tohost(const type* props, uint32_t count) {                \
5673         type* mut = (type*)props;                                                          \
5674         for (uint32_t i = 0; i < count; ++i) {                                             \
5675             mut[i].externalMemoryProperties =                                              \
5676                 transformExternalMemoryProperties_tohost(mut[i].externalMemoryProperties); \
5677         }                                                                                  \
5678     }                                                                                      \
5679     void transformImpl_##type##_fromhost(const type* props, uint32_t count) {              \
5680         type* mut = (type*)props;                                                          \
5681         for (uint32_t i = 0; i < count; ++i) {                                             \
5682             mut[i].externalMemoryProperties = transformExternalMemoryProperties_fromhost(  \
5683                 mut[i].externalMemoryProperties, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);      \
5684         }                                                                                  \
5685     }
5686 
5687     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalImageFormatInfo, handleType)
5688     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalBufferInfo, handleType)
5689     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryImageCreateInfo, handleTypes)
5690     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryBufferCreateInfo, handleTypes)
5691     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExportMemoryAllocateInfo, handleTypes)
5692     DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalImageFormatProperties)
5693     DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalBufferProperties)
5694 
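    // During snapshot load, boxed handles must keep their previously recorded values, so
    // newGlobalHandle replays handles from mCreatedHandlesForSnapshotLoad via addFixed();
    // otherwise it allocates a fresh boxed handle from sBoxedHandleManager.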
5695     uint64_t newGlobalHandle(const DispatchableHandleInfo<uint64_t>& item,
5696                              BoxedHandleTypeTag typeTag) {
5697         if (!mCreatedHandlesForSnapshotLoad.empty() &&
5698             (mCreatedHandlesForSnapshotLoad.size() - mCreatedHandlesForSnapshotLoadIndex > 0)) {
5699             auto handle = mCreatedHandlesForSnapshotLoad[mCreatedHandlesForSnapshotLoadIndex];
5700             VKDGS_LOG("use handle: %p", handle);
5701             ++mCreatedHandlesForSnapshotLoadIndex;
5702             auto res = sBoxedHandleManager.addFixed(handle, item, typeTag);
5703             return res;
5704         } else {
5705             return sBoxedHandleManager.add(item, typeTag);
5706         }
5707     }
5708 
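    // Boxed dispatchable handles wrap the driver handle together with its dispatch table,
    // ordering info, and an optional read stream. The generated helpers are used throughout
    // the decoder; a sketch of the common pattern seen in the handlers above:
    //   auto device = unbox_VkDevice(boxed_device);    // raw driver handle
    //   auto vk     = dispatch_VkDevice(boxed_device); // per-handle VulkanDispatch table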
5709 #define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL(type)                                           \
5710     type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) {          \
5711         DispatchableHandleInfo<uint64_t> item;                                                    \
5712         item.underlying = (uint64_t)underlying;                                                   \
5713         item.dispatch = dispatch ? dispatch : new VulkanDispatch;                                 \
5714         item.ownDispatch = ownDispatch;                                                           \
5715         item.ordMaintInfo = new OrderMaintenanceInfo;                                             \
5716         item.readStream = nullptr;                                                                \
5717         auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
5718         return res;                                                                               \
5719     }                                                                                             \
5720     void delete_##type(type boxed) {                                                              \
5721         if (!boxed) return;                                                                       \
5722         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5723         if (!elt) return;                                                                         \
5724         releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
5725         if (elt->readStream) {                                                                    \
5726             sReadStreamRegistry.push(elt->readStream);                                            \
5727             elt->readStream = nullptr;                                                            \
5728         }                                                                                         \
5729         sBoxedHandleManager.remove((uint64_t)boxed);                                              \
5730     }                                                                                             \
5731     type unbox_##type(type boxed) {                                                               \
5732         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5733         if (!elt) return VK_NULL_HANDLE;                                                          \
5734         return (type)elt->underlying;                                                             \
5735     }                                                                                             \
5736     OrderMaintenanceInfo* ordmaint_##type(type boxed) {                                           \
5737         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5738         if (!elt) return 0;                                                                       \
5739         auto info = elt->ordMaintInfo;                                                            \
5740         if (!info) return 0;                                                                      \
5741         acquireOrderMaintInfo(info);                                                              \
5742         return info;                                                                              \
5743     }                                                                                             \
5744     VulkanMemReadingStream* readstream_##type(type boxed) {                                       \
5745         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5746         if (!elt) return 0;                                                                       \
5747         auto stream = elt->readStream;                                                            \
5748         if (!stream) {                                                                            \
5749             stream = sReadStreamRegistry.pop();                                                   \
5750             elt->readStream = stream;                                                             \
5751         }                                                                                         \
5752         return stream;                                                                            \
5753     }                                                                                             \
5754     type unboxed_to_boxed_##type(type unboxed) {                                                  \
5755         AutoLock lock(sBoxedHandleManager.lock);                                                  \
5756         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
5757     }                                                                                             \
5758     VulkanDispatch* dispatch_##type(type boxed) {                                                 \
5759         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5760         if (!elt) {                                                                               \
5761             fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed);                     \
5762             return nullptr;                                                                       \
5763         }                                                                                         \
5764         return elt->dispatch;                                                                     \
5765     }
5766 
5767 #define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL(type)                                       \
5768     type new_boxed_non_dispatchable_##type(type underlying) {                                     \
5769         DispatchableHandleInfo<uint64_t> item;                                                    \
5770         item.underlying = (uint64_t)underlying;                                                   \
5771         auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
5772         return res;                                                                               \
5773     }                                                                                             \
5774     void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
5775         sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
5776     }                                                                                             \
5777     void delete_##type(type boxed) { sBoxedHandleManager.remove((uint64_t)boxed); }               \
5778     void set_boxed_non_dispatchable_##type(type boxed, type underlying) {                         \
5779         DispatchableHandleInfo<uint64_t> item;                                                    \
5780         item.underlying = (uint64_t)underlying;                                                   \
5781         sBoxedHandleManager.addFixed((uint64_t)boxed, item, Tag_##type);                          \
5782     }                                                                                             \
5783     type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
5784         AutoLock lock(sBoxedHandleManager.lock);                                                  \
5785         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
5786     }                                                                                             \
5787     type unbox_##type(type boxed) {                                                               \
5788         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5789         if (!elt) {                                                                               \
5790             if constexpr (!std::is_same_v<type, VkFence>) {                                       \
5791                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                   \
5792                     << "Unbox " << boxed << " failed, not found.";                                \
5793             }                                                                                     \
5794             return VK_NULL_HANDLE;                                                                \
5795         }                                                                                         \
5796         return (type)elt->underlying;                                                             \
5797     }
5798 
5799     GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL)
5800     GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL)
5801 
5802     VkDecoderSnapshot* snapshot() { return &mSnapshot; }
5803 
5804    private:
5805     bool isEmulatedInstanceExtension(const char* name) const {
5806         for (auto emulatedExt : kEmulatedInstanceExtensions) {
5807             if (!strcmp(emulatedExt, name)) return true;
5808         }
5809         return false;
5810     }
5811 
5812     bool isEmulatedDeviceExtension(const char* name) const {
5813         for (auto emulatedExt : kEmulatedDeviceExtensions) {
5814             if (!strcmp(emulatedExt, name)) return true;
5815         }
5816         return false;
5817     }
5818 
5819     bool supportEmulatedCompressedImageFormatProperty(VkFormat compressedFormat, VkImageType type,
5820                                                       VkImageTiling tiling, VkImageUsageFlags usage,
5821                                                       VkImageCreateFlags flags) {
5822         // BUG: 139193497
5823         return !(usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(type == VK_IMAGE_TYPE_1D);
5824     }
5825 
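    // Drops guest-side emulated device extensions from the requested list, then appends the
    // external memory/semaphore, sampler Y'CbCr conversion and swapchain extensions (plus the
    // per-OS external memory variants) that the physical device actually advertises.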
5826     std::vector<const char*> filteredDeviceExtensionNames(VulkanDispatch* vk,
5827                                                           VkPhysicalDevice physicalDevice,
5828                                                           uint32_t count,
5829                                                           const char* const* extNames) {
5830         std::vector<const char*> res;
5831         std::vector<VkExtensionProperties> properties;
5832         VkResult result;
5833 
5834         for (uint32_t i = 0; i < count; ++i) {
5835             auto extName = extNames[i];
5836             if (!isEmulatedDeviceExtension(extName)) {
5837                 res.push_back(extName);
5838                 continue;
5839             }
5840         }
5841 
5842         result = enumerateDeviceExtensionProperties(vk, physicalDevice, nullptr, properties);
5843         if (result != VK_SUCCESS) {
5844             VKDGS_LOG("failed to enumerate device extensions");
5845             return res;
5846         }
5847 
5848         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
5849             res.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
5850         }
5851 
5852         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME)) {
5853             res.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
5854         }
5855 
5856         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME)) {
5857             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
5858         }
5859 
5860         if (hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
5861             res.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
5862         }
5863 
5864         if (hasDeviceExtension(properties, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
5865             res.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
5866         }
5867 
5868 #ifdef _WIN32
5869         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME)) {
5870             res.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
5871         }
5872 
5873         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME)) {
5874             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
5875         }
5876 #elif defined(__QNX__)
5877         // Note: VK_QNX_external_memory_screen_buffer is not supported in API translation,
5878         // decoding, etc. However, push the name to indicate external memory support to the guest.
5879         if (hasDeviceExtension(properties, VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME)) {
5880             res.push_back(VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME);
5881         }
5882 
5883         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
5884             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
5885         }
5886 #elif __unix__
5887         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)) {
5888             res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
5889         }
5890 
5891         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
5892             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
5893         }
5894 #endif
5895 
5896 #ifdef __linux__
5897         // A dma-buf is a Linux kernel construct, commonly used with open-source DRM drivers.
5898         // See https://docs.kernel.org/driver-api/dma-buf.html for details.
5899         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
5900             res.push_back(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME);
5901         }
5902 #endif
5903         return res;
5904     }
5905 
5906     std::vector<const char*> filteredInstanceExtensionNames(uint32_t count,
5907                                                             const char* const* extNames) {
5908         std::vector<const char*> res;
5909         for (uint32_t i = 0; i < count; ++i) {
5910             auto extName = extNames[i];
5911             if (!isEmulatedInstanceExtension(extName)) {
5912                 res.push_back(extName);
5913             }
5914         }
5915 
5916         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
5917             res.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
5918         }
5919 
5920         if (m_emu->instanceSupportsExternalSemaphoreCapabilities) {
5921             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
5922         }
5923 
5924         if (m_emu->debugUtilsAvailableAndRequested) {
5925             res.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
5926         }
5927 
5928         if (m_emu->instanceSupportsSurface) {
5929             res.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
5930         }
5931 
5932         return res;
5933     }
5934 
5935     VkPhysicalDeviceMemoryProperties* memPropsOfDeviceLocked(VkDevice device) {
5936         auto* physdev = android::base::find(mDeviceToPhysicalDevice, device);
5937         if (!physdev) return nullptr;
5938 
5939         auto* physdevInfo = android::base::find(mPhysdevInfo, *physdev);
5940         if (!physdevInfo) return nullptr;
5941 
5942         return &physdevInfo->memoryProperties;
5943     }
5944 
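    // Picks the device's "default" queue: the first queue of family 0 if one was created,
    // otherwise the first queue found in any family; also hands back that queue's lock.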
5945     bool getDefaultQueueForDeviceLocked(VkDevice device, VkQueue* queue, uint32_t* queueFamilyIndex,
5946                                         Lock** queueLock) {
5947         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5948         if (!deviceInfo) return false;
5949 
5950         auto zeroIt = deviceInfo->queues.find(0);
5951         if (zeroIt == deviceInfo->queues.end() || zeroIt->second.empty()) {
5952             // Get the first queue / queueFamilyIndex
5953             // that does show up.
5954             for (const auto& it : deviceInfo->queues) {
5955                 auto index = it.first;
5956                 for (auto& deviceQueue : it.second) {
5957                     *queue = deviceQueue;
5958                     *queueFamilyIndex = index;
5959                     *queueLock = mQueueInfo.at(deviceQueue).lock;
5960                     return true;
5961                 }
5962             }
5963             // Didn't find anything, fail.
5964             return false;
5965         } else {
5966             // Use queue family index 0.
5967             *queue = zeroIt->second[0];
5968             *queueFamilyIndex = 0;
5969             *queueLock = mQueueInfo.at(zeroIt->second[0]).lock;
5970             return true;
5971         }
5972 
5973         return false;
5974     }
5975 
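    // For images that use emulated ETC2/ASTC decompression, replace the driver-reported
    // requirements with the ones tracked by CompressedImageInfo, which are sized for the
    // emulated representation of the image.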
5976     void updateImageMemorySizeLocked(VkDevice device, VkImage image,
5977                                      VkMemoryRequirements* pMemoryRequirements) {
5978         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5979         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
5980             return;
5981         }
5982         auto* imageInfo = android::base::find(mImageInfo, image);
5983         if (!imageInfo) return;
5984         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
5985         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
5986             return;
5987         }
5988         *pMemoryRequirements = cmpInfo.getMemoryRequirements();
5989     }
5990 
5991     // Whether the VkInstance associated with this physical device was created by ANGLE
5992     bool isAngleInstance(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
5993         std::lock_guard<std::recursive_mutex> lock(mLock);
5994         VkInstance* instance = android::base::find(mPhysicalDeviceToInstance, physicalDevice);
5995         if (!instance) return false;
5996         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
5997         if (!instanceInfo) return false;
5998         return instanceInfo->isAngle;
5999     }
6000 
6001     bool enableEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
6002         if (!m_emu->enableEtc2Emulation) return false;
6003 
6004         // Don't enable ETC2 emulation for ANGLE, let it do its own emulation.
6005         return !isAngleInstance(physicalDevice, vk);
6006     }
6007 
6008     bool enableEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
6009         if (m_emu->astcLdrEmulationMode == AstcEmulationMode::Disabled) {
6010             return false;
6011         }
6012 
6013         // Don't enable ASTC emulation for ANGLE, let it do its own emulation.
6014         return !isAngleInstance(physicalDevice, vk);
6015     }
6016 
6017     bool needEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
6018         if (!enableEmulatedEtc2(physicalDevice, vk)) {
6019             return false;
6020         }
6021         VkPhysicalDeviceFeatures feature;
6022         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
6023         return !feature.textureCompressionETC2;
6024     }
6025 
6026     bool needEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
6027         if (!enableEmulatedAstc(physicalDevice, vk)) {
6028             return false;
6029         }
6030         VkPhysicalDeviceFeatures feature;
6031         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
6032         return !feature.textureCompressionASTC_LDR;
6033     }
6034 
6035     bool supportsSwapchainMaintenance1(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
6036         bool hasGetPhysicalDeviceFeatures2 = false;
6037         bool hasGetPhysicalDeviceFeatures2KHR = false;
6038 
6039         {
6040             std::lock_guard<std::recursive_mutex> lock(mLock);
6041 
6042             auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
6043             if (!physdevInfo) {
6044                 return false;
6045             }
6046 
6047             auto instance = mPhysicalDeviceToInstance[physicalDevice];
6048             auto* instanceInfo = android::base::find(mInstanceInfo, instance);
6049             if (!instanceInfo) {
6050                 return false;
6051             }
6052 
6053             if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
6054                 physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
6055                 hasGetPhysicalDeviceFeatures2 = true;
6056             } else if (hasInstanceExtension(instance,
6057                                             VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
6058                 hasGetPhysicalDeviceFeatures2KHR = true;
6059             } else {
6060                 return false;
6061             }
6062         }
6063 
6064         VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT swapchainMaintenance1Features = {
6065             .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT,
6066             .pNext = nullptr,
6067             .swapchainMaintenance1 = VK_FALSE,
6068         };
6069         VkPhysicalDeviceFeatures2 features2 = {
6070             .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
6071             .pNext = &swapchainMaintenance1Features,
6072         };
6073         if (hasGetPhysicalDeviceFeatures2) {
6074             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
6075         } else if (hasGetPhysicalDeviceFeatures2KHR) {
6076             vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, &features2);
6077         } else {
6078             return false;
6079         }
6080 
6081         return swapchainMaintenance1Features.swapchainMaintenance1 == VK_TRUE;
6082     }
6083 
6084     bool isEmulatedCompressedTexture(VkFormat format, VkPhysicalDevice physicalDevice,
6085                                      VulkanDispatch* vk) {
6086         return (gfxstream::vk::isEtc2(format) && needEmulatedEtc2(physicalDevice, vk)) ||
6087                (gfxstream::vk::isAstc(format) && needEmulatedAstc(physicalDevice, vk));
6088     }
6089 
6090     static const VkFormatFeatureFlags kEmulatedTextureBufferFeatureMask =
6091         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
6092         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
6093 
6094     static const VkFormatFeatureFlags kEmulatedTextureOptimalTilingMask =
6095         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
6096         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT |
6097         VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
6098 
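    // Emulated compressed formats only advertise the transfer/blit/sampled feature bits
    // defined by the masks above; everything else (e.g. color attachment usage) is masked out
    // of the properties reported to the guest.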
6099     void maskFormatPropertiesForEmulatedTextures(VkFormatProperties* pFormatProp) {
6100         pFormatProp->linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
6101         pFormatProp->optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
6102         pFormatProp->bufferFeatures &= kEmulatedTextureBufferFeatureMask;
6103     }
6104 
6105     void maskFormatPropertiesForEmulatedTextures(VkFormatProperties2* pFormatProp) {
6106         pFormatProp->formatProperties.linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
6107         pFormatProp->formatProperties.optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
6108         pFormatProp->formatProperties.bufferFeatures &= kEmulatedTextureBufferFeatureMask;
6109     }
6110 
6111     void maskImageFormatPropertiesForEmulatedTextures(VkImageFormatProperties* pProperties) {
6112         // dEQP-VK.api.info.image_format_properties.2d.optimal#etc2_r8g8b8_unorm_block
6113         pProperties->sampleCounts &= VK_SAMPLE_COUNT_1_BIT;
6114     }
6115 
6116     template <class VkFormatProperties1or2>
6117     void getPhysicalDeviceFormatPropertiesCore(
6118         std::function<void(VkPhysicalDevice, VkFormat, VkFormatProperties1or2*)>
6119             getPhysicalDeviceFormatPropertiesFunc,
6120         VulkanDispatch* vk, VkPhysicalDevice physicalDevice, VkFormat format,
6121         VkFormatProperties1or2* pFormatProperties) {
6122         if (isEmulatedCompressedTexture(format, physicalDevice, vk)) {
6123             getPhysicalDeviceFormatPropertiesFunc(
6124                 physicalDevice, CompressedImageInfo::getOutputFormat(format),
6125                 pFormatProperties);
6126             maskFormatPropertiesForEmulatedTextures(pFormatProperties);
6127             return;
6128         }
6129         getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format, pFormatProperties);
6130     }
6131 
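    // Runs the preprocess callbacks queued on a command buffer; the VkSubmitInfo/VkSubmitInfo2
    // overloads below apply this to every command buffer in a submit. Recursion into secondary
    // command buffers is still a TODO.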
6132     void executePreprocessRecursive(int level, VkCommandBuffer cmdBuffer) {
6133         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, cmdBuffer);
6134         if (!cmdBufferInfo) return;
6135         for (const auto& func : cmdBufferInfo->preprocessFuncs) {
6136             func();
6137         }
6138         // TODO: fix
6139         // for (const auto& subCmd : cmdBufferInfo->subCmds) {
6140         // executePreprocessRecursive(level + 1, subCmd);
6141         // }
6142     }
6143 
6144     void executePreprocessRecursive(const VkSubmitInfo& submit) {
6145         for (uint32_t c = 0; c < submit.commandBufferCount; c++) {
6146             executePreprocessRecursive(0, submit.pCommandBuffers[c]);
6147         }
6148     }
6149 
6150     void executePreprocessRecursive(const VkSubmitInfo2& submit) {
6151         for (uint32_t c = 0; c < submit.commandBufferInfoCount; c++) {
6152             executePreprocessRecursive(0, submit.pCommandBufferInfos[c].commandBuffer);
6153         }
6154     }
6155 
6156     template <typename VkHandleToInfoMap,
6157               typename HandleType = typename std::decay_t<VkHandleToInfoMap>::key_type>
6158     std::vector<HandleType> findDeviceObjects(VkDevice device, const VkHandleToInfoMap& map) {
6159         std::vector<HandleType> objectsFromDevice;
6160         for (const auto& [objectHandle, objectInfo] : map) {
6161             if (objectInfo.device == device) {
6162                 objectsFromDevice.push_back(objectHandle);
6163             }
6164         }
6165         return objectsFromDevice;
6166     }
6167 
6168     template <typename VkHandleToInfoMap, typename InfoMemberType,
6169               typename HandleType = typename std::decay_t<VkHandleToInfoMap>::key_type,
6170               typename InfoType = typename std::decay_t<VkHandleToInfoMap>::value_type>
6171     std::vector<std::pair<HandleType, InfoMemberType>> findDeviceObjects(
6172         VkDevice device, const VkHandleToInfoMap& map, InfoMemberType InfoType::*member) {
6173         std::vector<std::pair<HandleType, InfoMemberType>> objectsFromDevice;
6174         for (const auto& [objectHandle, objectInfo] : map) {
6175             if (objectInfo.device == device) {
6176                 objectsFromDevice.emplace_back(objectHandle, objectInfo.*member);
6177             }
6178         }
6179         return objectsFromDevice;
6180     }
6181 
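    // Destroys every VkDevice created from |instance| along with its remaining child objects
    // (semaphores, samplers, buffers, image views, images, memory, command and descriptor
    // pools, ...), idling each device first per the linked Chromium bug.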
6182     void teardownInstanceLocked(VkInstance instance) {
6183         std::vector<VkDevice> devicesToDestroy;
6184         std::vector<VulkanDispatch*> devicesToDestroyDispatches;
6185 
6186         for (auto it : mDeviceToPhysicalDevice) {
6187             auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
6188             if (!otherInstance) continue;
6189 
6190             if (instance == *otherInstance) {
6191                 devicesToDestroy.push_back(it.first);
6192                 devicesToDestroyDispatches.push_back(
6193                     dispatch_VkDevice(mDeviceInfo[it.first].boxed));
6194             }
6195         }
6196 
6197         for (uint32_t i = 0; i < devicesToDestroy.size(); ++i) {
6198             VkDevice deviceToDestroy = devicesToDestroy[i];
6199             VulkanDispatch* deviceToDestroyDispatch = devicesToDestroyDispatches[i];
6200 
6201             // https://bugs.chromium.org/p/chromium/issues/detail?id=1074600
6202             // it's important to idle the device before destroying it!
6203             deviceToDestroyDispatch->vkDeviceWaitIdle(deviceToDestroy);
6204 
6205             for (auto semaphore : findDeviceObjects(deviceToDestroy, mSemaphoreInfo)) {
6206                 destroySemaphoreLocked(deviceToDestroy, deviceToDestroyDispatch, semaphore,
6207                                        nullptr);
6208             }
6209 
6210             for (auto sampler : findDeviceObjects(deviceToDestroy, mSamplerInfo)) {
6211                 destroySamplerLocked(deviceToDestroy, deviceToDestroyDispatch, sampler, nullptr);
6212             }
6213 
6214             for (auto buffer : findDeviceObjects(deviceToDestroy, mBufferInfo)) {
6215                 deviceToDestroyDispatch->vkDestroyBuffer(deviceToDestroy, buffer, nullptr);
6216                 mBufferInfo.erase(buffer);
6217             }
6218 
6219             for (auto imageView : findDeviceObjects(deviceToDestroy, mImageViewInfo)) {
6220                 deviceToDestroyDispatch->vkDestroyImageView(deviceToDestroy, imageView, nullptr);
6221                 mImageViewInfo.erase(imageView);
6222             }
6223 
6224             for (auto image : findDeviceObjects(deviceToDestroy, mImageInfo)) {
6225                 destroyImageLocked(deviceToDestroy, deviceToDestroyDispatch, image, nullptr);
6226             }
6227 
6228             for (auto memory : findDeviceObjects(deviceToDestroy, mMemoryInfo)) {
6229                 freeMemoryLocked(deviceToDestroyDispatch, deviceToDestroy, memory, nullptr);
6230             }
6231 
6232             for (auto [commandBuffer, commandPool] :
6233                  findDeviceObjects(deviceToDestroy, mCmdBufferInfo, &CommandBufferInfo::cmdPool)) {
6234                 // The command buffer is freed with the vkDestroyCommandPool() below.
6235                 delete_VkCommandBuffer(unboxed_to_boxed_VkCommandBuffer(commandBuffer));
6236                 mCmdBufferInfo.erase(commandBuffer);
6237             }
6238 
6239             for (auto [commandPool, commandPoolBoxed] :
6240                  findDeviceObjects(deviceToDestroy, mCmdPoolInfo, &CommandPoolInfo::boxed)) {
6241                 deviceToDestroyDispatch->vkDestroyCommandPool(deviceToDestroy, commandPool,
6242                                                               nullptr);
6243                 delete_VkCommandPool(commandPoolBoxed);
6244                 mCmdPoolInfo.erase(commandPool);
6245             }
6246 
6247             for (auto [descriptorPool, descriptorPoolBoxed] : findDeviceObjects(
6248                      deviceToDestroy, mDescriptorPoolInfo, &DescriptorPoolInfo::boxed)) {
6249                 cleanupDescriptorPoolAllocedSetsLocked(descriptorPool, /*isDestroy=*/true);
6250                 deviceToDestroyDispatch->vkDestroyDescriptorPool(deviceToDestroy, descriptorPool,
6251                                                                  nullptr);
6252                 delete_VkDescriptorPool(descriptorPoolBoxed);
6253                 mDescriptorPoolInfo.erase(descriptorPool);
6254             }
6255 
6256             for (auto [descriptorSetLayout, descriptorSetLayoutBoxed] : findDeviceObjects(
6257                      deviceToDestroy, mDescriptorSetLayoutInfo, &DescriptorSetLayoutInfo::boxed)) {
6258                 deviceToDestroyDispatch->vkDestroyDescriptorSetLayout(deviceToDestroy,
6259                                                                       descriptorSetLayout, nullptr);
6260                 delete_VkDescriptorSetLayout(descriptorSetLayoutBoxed);
6261                 mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
6262             }
6263 
6264             for (auto shaderModule : findDeviceObjects(deviceToDestroy, mShaderModuleInfo)) {
6265                 destroyShaderModuleLocked(deviceToDestroy, deviceToDestroyDispatch, shaderModule,
6266                                           nullptr);
6267             }
6268 
6269             for (auto pipeline : findDeviceObjects(deviceToDestroy, mPipelineInfo)) {
6270                 destroyPipelineLocked(deviceToDestroy, deviceToDestroyDispatch, pipeline, nullptr);
6271             }
6272 
6273             for (auto pipelineCache : findDeviceObjects(deviceToDestroy, mPipelineCacheInfo)) {
6274                 destroyPipelineCacheLocked(deviceToDestroy, deviceToDestroyDispatch, pipelineCache,
6275                                            nullptr);
6276             }
6277 
6278             for (auto framebuffer : findDeviceObjects(deviceToDestroy, mFramebufferInfo)) {
6279                 destroyFramebufferLocked(deviceToDestroy, deviceToDestroyDispatch, framebuffer,
6280                                          nullptr);
6281             }
6282 
6283             for (auto renderPass : findDeviceObjects(deviceToDestroy, mRenderPassInfo)) {
6284                 destroyRenderPassLocked(deviceToDestroy, deviceToDestroyDispatch, renderPass,
6285                                         nullptr);
6286             }
6287         }
6288 
6289         for (VkDevice deviceToDestroy : devicesToDestroy) {
6290             destroyDeviceLocked(deviceToDestroy, nullptr);
6291             mDeviceInfo.erase(deviceToDestroy);
6292             mDeviceToPhysicalDevice.erase(deviceToDestroy);
6293         }
6294 
6295         // TODO: Clean up the physical device info in `mPhysdevInfo` but we need to be careful
6296         // as the Vulkan spec does not guarantee that the VkPhysicalDevice handles returned are
6297         // unique per VkInstance.
6298     }
6299 
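    // Deferred host-side callbacks recorded while a command buffer is decoded and replayed at
    // queue submit time by executePreprocessRecursive() above.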
    typedef std::function<void()> PreprocessFunc;
    struct CommandBufferInfo {
        std::vector<PreprocessFunc> preprocessFuncs = {};
        std::vector<VkCommandBuffer> subCmds = {};
        VkDevice device = VK_NULL_HANDLE;
        VkCommandPool cmdPool = VK_NULL_HANDLE;
        VkCommandBuffer boxed = VK_NULL_HANDLE;
        DebugUtilsHelper debugUtilsHelper = DebugUtilsHelper::withUtilsDisabled();

        // Most recently bound compute pipeline and descriptor sets. We save it here so that we can
        // restore it after doing emulated texture decompression.
        VkPipeline computePipeline = VK_NULL_HANDLE;
        uint32_t firstSet = 0;
        VkPipelineLayout descriptorLayout = VK_NULL_HANDLE;
        std::vector<VkDescriptorSet> descriptorSets;
        std::vector<uint32_t> dynamicOffsets;

        void reset() {
            preprocessFuncs.clear();
            subCmds.clear();
            computePipeline = VK_NULL_HANDLE;
            firstSet = 0;
            descriptorLayout = VK_NULL_HANDLE;
            descriptorSets.clear();
            dynamicOffsets.clear();
        }
    };

    struct CommandPoolInfo {
        VkDevice device = VK_NULL_HANDLE;
        VkCommandPool boxed = VK_NULL_HANDLE;
        std::unordered_set<VkCommandBuffer> cmdBuffers = {};
    };

    void removeCommandBufferInfo(const std::unordered_set<VkCommandBuffer>& cmdBuffers) {
        for (const auto& cmdBuffer : cmdBuffers) {
            mCmdBufferInfo.erase(cmdBuffer);
        }
    }

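    // Descriptor type classification helpers used when linearizing descriptor update template
    // entries into a single packed host buffer (see calcLinearizedDescriptorUpdateTemplateInfo).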
    bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    }

    bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    }

    bool isDescriptorTypeBufferView(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    }

    struct DescriptorUpdateTemplateInfo {
        VkDescriptorUpdateTemplateCreateInfo createInfo;
        std::vector<VkDescriptorUpdateTemplateEntry> linearizedTemplateEntries;
        // Preallocated pData
        std::vector<uint8_t> data;
        size_t imageInfoStart;
        size_t bufferInfoStart;
        size_t bufferViewStart;
        size_t inlineUniformBlockStart;
    };

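    // Rewrites a guest descriptor update template so that every entry points at an offset inside
    // one contiguous buffer: image infos first, then buffer infos, then buffer views, then raw
    // bytes for inline uniform blocks.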
    DescriptorUpdateTemplateInfo calcLinearizedDescriptorUpdateTemplateInfo(
        const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo) {
        DescriptorUpdateTemplateInfo res;
        res.createInfo = *pCreateInfo;

        size_t numImageInfos = 0;
        size_t numBufferInfos = 0;
        size_t numBufferViews = 0;
        size_t numInlineUniformBlocks = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            auto type = entry.descriptorType;
            auto count = entry.descriptorCount;
            if (isDescriptorTypeImageInfo(type)) {
                numImageInfos += count;
            } else if (isDescriptorTypeBufferInfo(type)) {
                numBufferInfos += count;
            } else if (isDescriptorTypeBufferView(type)) {
                numBufferViews += count;
            } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
                numInlineUniformBlocks += count;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "unknown descriptor type 0x" << std::hex << type;
            }
        }

        size_t imageInfoBytes = numImageInfos * sizeof(VkDescriptorImageInfo);
        size_t bufferInfoBytes = numBufferInfos * sizeof(VkDescriptorBufferInfo);
        size_t bufferViewBytes = numBufferViews * sizeof(VkBufferView);
        size_t inlineUniformBlockBytes = numInlineUniformBlocks;

        res.data.resize(imageInfoBytes + bufferInfoBytes + bufferViewBytes +
                        inlineUniformBlockBytes);
        res.imageInfoStart = 0;
        res.bufferInfoStart = imageInfoBytes;
        res.bufferViewStart = imageInfoBytes + bufferInfoBytes;
        res.inlineUniformBlockStart = imageInfoBytes + bufferInfoBytes + bufferViewBytes;

        size_t imageInfoCount = 0;
        size_t bufferInfoCount = 0;
        size_t bufferViewCount = 0;
        size_t inlineUniformBlockCount = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            VkDescriptorUpdateTemplateEntry entryForHost = entry;

            auto type = entry.descriptorType;

            if (isDescriptorTypeImageInfo(type)) {
                entryForHost.offset =
                    res.imageInfoStart + imageInfoCount * sizeof(VkDescriptorImageInfo);
                entryForHost.stride = sizeof(VkDescriptorImageInfo);
                ++imageInfoCount;
            } else if (isDescriptorTypeBufferInfo(type)) {
                entryForHost.offset =
                    res.bufferInfoStart + bufferInfoCount * sizeof(VkDescriptorBufferInfo);
                entryForHost.stride = sizeof(VkDescriptorBufferInfo);
                ++bufferInfoCount;
            } else if (isDescriptorTypeBufferView(type)) {
                entryForHost.offset = res.bufferViewStart + bufferViewCount * sizeof(VkBufferView);
                entryForHost.stride = sizeof(VkBufferView);
                ++bufferViewCount;
            } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
                entryForHost.offset = res.inlineUniformBlockStart + inlineUniformBlockCount;
                entryForHost.stride = 0;
                inlineUniformBlockCount += entryForHost.descriptorCount;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "unknown descriptor type 0x" << std::hex << type;
            }

            res.linearizedTemplateEntries.push_back(entryForHost);
        }

        res.createInfo.pDescriptorUpdateEntries = res.linearizedTemplateEntries.data();

        return res;
    }

    void registerDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                          const DescriptorUpdateTemplateInfo& info) {
        std::lock_guard<std::recursive_mutex> lock(mLock);
        mDescriptorUpdateTemplateInfo[descriptorUpdateTemplate] = info;
    }

    void unregisterDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
        std::lock_guard<std::recursive_mutex> lock(mLock);
        mDescriptorUpdateTemplateInfo.erase(descriptorUpdateTemplate);
    }

    // Returns the VkInstance associated with a VkDevice, or null if it's not found
    VkInstance* deviceToInstanceLocked(VkDevice device) {
        auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
        if (!physicalDevice) return nullptr;
        return android::base::find(mPhysicalDeviceToInstance, *physicalDevice);
    }

    VulkanDispatch* m_vk;
    VkEmulation* m_emu;
    emugl::RenderDocWithMultipleVkInstances* mRenderDocWithMultipleVkInstances = nullptr;
    bool mSnapshotsEnabled = false;
    bool mVkCleanupEnabled = true;
    bool mLogging = false;
    bool mVerbosePrints = false;
    bool mUseOldMemoryCleanupPath = false;
    bool mGuestUsesAngle = false;

    std::recursive_mutex mLock;

    // We always map the whole size on host.
    // This makes it much easier to implement
    // the memory map API.
    struct MemoryInfo {
        // This indicates whether the VkDecoderGlobalState needs to clean up
        // and unmap the mapped memory; only the owner of the mapped memory
        // should call unmap.
        bool needUnmap = false;
        // When ptr is null, it means the VkDeviceMemory object
        // was not allocated with the HOST_VISIBLE property.
        void* ptr = nullptr;
        VkDeviceSize size;
        // GLDirectMem info
        bool directMapped = false;
        bool virtioGpuMapped = false;
        uint32_t caching = 0;
        uint64_t guestPhysAddr = 0;
        void* pageAlignedHva = nullptr;
        uint64_t sizeToPage = 0;
        uint64_t hostmemId = 0;
        VkDevice device = VK_NULL_HANDLE;
        MTLTextureRef mtlTexture = nullptr;
        uint32_t memoryIndex = 0;
        // Set if the memory is backed by shared memory.
        std::optional<SharedMemory> sharedMemory;

        // virtio-gpu blobs
        uint64_t blobId = 0;

        // Color buffer, provided via vkAllocateMemory().
        HandleType boundColorBuffer = 0;
    };

    struct InstanceInfo {
        std::vector<std::string> enabledExtensionNames;
        uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
        VkInstance boxed = nullptr;
        bool isAngle = false;
    };

    struct PhysicalDeviceInfo {
        VkPhysicalDeviceProperties props;
        VkPhysicalDeviceMemoryProperties memoryProperties;
        std::vector<VkQueueFamilyProperties> queueFamilyProperties;
        VkPhysicalDevice boxed = nullptr;
    };

    struct DeviceInfo {
        std::unordered_map<uint32_t, std::vector<VkQueue>> queues;
        std::vector<std::string> enabledExtensionNames;
        bool emulateTextureEtc2 = false;
        bool emulateTextureAstc = false;
        bool useAstcCpuDecompression = false;
        VkPhysicalDevice physicalDevice;
        VkDevice boxed = nullptr;
        DebugUtilsHelper debugUtilsHelper = DebugUtilsHelper::withUtilsDisabled();
        std::unique_ptr<ExternalFencePool<VulkanDispatch>> externalFencePool = nullptr;
        std::set<VkFormat> imageFormats = {};  // image formats used on this device
        std::unique_ptr<GpuDecompressionPipelineManager> decompPipelines = nullptr;

        // True if this is a compressed image that needs to be decompressed on the GPU (with our
        // compute shader)
        bool needGpuDecompression(const CompressedImageInfo& cmpInfo) {
            return ((cmpInfo.isEtc2() && emulateTextureEtc2) ||
                    (cmpInfo.isAstc() && emulateTextureAstc && !useAstcCpuDecompression));
        }
        bool needEmulatedDecompression(const CompressedImageInfo& cmpInfo) {
            return ((cmpInfo.isEtc2() && emulateTextureEtc2) ||
                    (cmpInfo.isAstc() && emulateTextureAstc));
        }
        bool needEmulatedDecompression(VkFormat format) {
            return (gfxstream::vk::isEtc2(format) && emulateTextureEtc2) ||
                   (gfxstream::vk::isAstc(format) && emulateTextureAstc);
        }
    };

    struct QueueInfo {
        Lock* lock = nullptr;
        VkDevice device;
        uint32_t queueFamilyIndex;
        VkQueue boxed = nullptr;
        uint32_t sequenceNumber = 0;
    };

    struct BufferInfo {
        VkDevice device;
        VkDeviceMemory memory = 0;
        VkDeviceSize memoryOffset = 0;
        VkDeviceSize size;
    };

    struct ImageInfo {
        VkDevice device;
        VkImageCreateInfo imageCreateInfoShallow;
        std::shared_ptr<AndroidNativeBufferInfo> anbInfo;
        CompressedImageInfo cmpInfo;

        // Color buffer, provided via vkAllocateMemory().
        HandleType boundColorBuffer = 0;
    };

    struct ImageViewInfo {
        VkDevice device;
        bool needEmulatedAlpha = false;

        // Color buffer, provided via vkAllocateMemory().
        HandleType boundColorBuffer = 0;
    };

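    // SamplerInfo owns a deep copy of its create info, allocated from its own BumpPool, so the
    // copy operations below re-run deepcopy_VkSamplerCreateInfo instead of copying raw pointers.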
    struct SamplerInfo {
        VkDevice device;
        bool needEmulatedAlpha = false;
        VkSamplerCreateInfo createInfo = {};
        VkSampler emulatedborderSampler = VK_NULL_HANDLE;
        android::base::BumpPool pool = android::base::BumpPool(256);
        SamplerInfo() = default;
        SamplerInfo& operator=(const SamplerInfo& other) {
            deepcopy_VkSamplerCreateInfo(&pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
                                         &other.createInfo, &createInfo);
            device = other.device;
            needEmulatedAlpha = other.needEmulatedAlpha;
            emulatedborderSampler = other.emulatedborderSampler;
            return *this;
        }
        SamplerInfo(const SamplerInfo& other) { *this = other; }
        SamplerInfo(SamplerInfo&& other) = delete;
        SamplerInfo& operator=(SamplerInfo&& other) = delete;
    };

    struct FenceInfo {
        VkDevice device = VK_NULL_HANDLE;
        VkFence boxed = VK_NULL_HANDLE;
        VulkanDispatch* vk = nullptr;

        StaticLock lock;
        android::base::ConditionVariable cv;

        enum class State {
            kWaitable,
            kNotWaitable,
            kWaiting,
        };
        State state = State::kNotWaitable;

        bool external = false;
    };

    struct SemaphoreInfo {
        VkDevice device;
        int externalHandleId = 0;
        VK_EXT_SYNC_HANDLE externalHandle = VK_EXT_SYNC_HANDLE_INVALID;
    };

    struct DescriptorSetLayoutInfo {
        VkDevice device = 0;
        VkDescriptorSetLayout boxed = 0;
        VkDescriptorSetLayoutCreateInfo createInfo;
        std::vector<VkDescriptorSetLayoutBinding> bindings;
    };

    struct DescriptorPoolInfo {
        VkDevice device = 0;
        VkDescriptorPool boxed = 0;
        struct PoolState {
            VkDescriptorType type;
            uint32_t descriptorCount;
            uint32_t used;
        };

        VkDescriptorPoolCreateInfo createInfo;
        uint32_t maxSets;
        uint32_t usedSets;
        std::vector<PoolState> pools;

        std::unordered_map<VkDescriptorSet, VkDescriptorSet> allocedSetsToBoxed;
        std::vector<uint64_t> poolIds;
    };

    struct DescriptorSetInfo {
        VkDescriptorPool pool;
        std::vector<VkDescriptorSetLayoutBinding> bindings;
    };

    struct ShaderModuleInfo {
        VkDevice device;
    };

    struct PipelineCacheInfo {
        VkDevice device;
    };

    struct PipelineInfo {
        VkDevice device;
    };

    struct RenderPassInfo {
        VkDevice device;
    };

    struct FramebufferInfo {
        VkDevice device;
    };

    bool isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState& poolState,
                                   const VkDescriptorSetLayoutBinding& binding) {
        if (binding.descriptorCount && (poolState.type != binding.descriptorType)) {
            return false;
        }

        uint32_t availDescriptorCount = poolState.descriptorCount - poolState.used;

        if (availDescriptorCount < binding.descriptorCount) {
            return false;
        }

        return true;
    }

    bool isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState& poolState,
                                  const VkDescriptorSetLayoutBinding& binding) {
        if (poolState.type != binding.descriptorType) return false;
        if (poolState.used < binding.descriptorCount) return false;
        return true;
    }

    void allocBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
                              DescriptorPoolInfo::PoolState& poolState) {
        poolState.used += binding.descriptorCount;
    }

    void freeBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
                             DescriptorPoolInfo::PoolState& poolState) {
        poolState.used -= binding.descriptorCount;
    }

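    // Simulates the requested allocation against a copy of the pool's per-type bookkeeping so we
    // can report VK_ERROR_OUT_OF_POOL_MEMORY without touching the real pool state.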
    VkResult validateDescriptorSetAllocLocked(const VkDescriptorSetAllocateInfo* pAllocateInfo) {
        auto* poolInfo = android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
        if (!poolInfo) return VK_ERROR_INITIALIZATION_FAILED;

        // Check the number of sets available.
        auto setsAvailable = poolInfo->maxSets - poolInfo->usedSets;

        if (setsAvailable < pAllocateInfo->descriptorSetCount) {
            return VK_ERROR_OUT_OF_POOL_MEMORY;
        }

        // Perform simulated allocation and error out with
        // VK_ERROR_OUT_OF_POOL_MEMORY if it fails.
        std::vector<DescriptorPoolInfo::PoolState> poolCopy = poolInfo->pools;

        for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
            auto setLayoutInfo =
                android::base::find(mDescriptorSetLayoutInfo, pAllocateInfo->pSetLayouts[i]);
            if (!setLayoutInfo) return VK_ERROR_INITIALIZATION_FAILED;

            for (const auto& binding : setLayoutInfo->bindings) {
                bool success = false;
                for (auto& pool : poolCopy) {
                    if (!isBindingFeasibleForAlloc(pool, binding)) continue;

                    success = true;
                    allocBindingFeasible(binding, pool);
                    break;
                }

                if (!success) {
                    return VK_ERROR_OUT_OF_POOL_MEMORY;
                }
            }
        }
        return VK_SUCCESS;
    }

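    // Commits an allocation that validateDescriptorSetAllocLocked() already approved to the real
    // pool bookkeeping; removeDescriptorSetAllocationLocked() undoes it when a set is freed.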
    void applyDescriptorSetAllocationLocked(
        DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
        ++poolInfo.usedSets;
        for (const auto& binding : bindings) {
            for (auto& pool : poolInfo.pools) {
                if (!isBindingFeasibleForAlloc(pool, binding)) continue;
                allocBindingFeasible(binding, pool);
                break;
            }
        }
    }

    void removeDescriptorSetAllocationLocked(
        DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
        --poolInfo.usedSets;
        for (const auto& binding : bindings) {
            for (auto& pool : poolInfo.pools) {
                if (!isBindingFeasibleForFree(pool, binding)) continue;
                freeBindingFeasible(binding, pool);
                break;
            }
        }
    }

    template <class T>
    class NonDispatchableHandleInfo {
       public:
        T underlying;
    };

    std::unordered_map<VkInstance, InstanceInfo> mInstanceInfo;
    std::unordered_map<VkPhysicalDevice, PhysicalDeviceInfo> mPhysdevInfo;
    std::unordered_map<VkDevice, DeviceInfo> mDeviceInfo;
    std::unordered_map<VkImage, ImageInfo> mImageInfo;
    std::unordered_map<VkImageView, ImageViewInfo> mImageViewInfo;
    std::unordered_map<VkSampler, SamplerInfo> mSamplerInfo;
    std::unordered_map<VkCommandBuffer, CommandBufferInfo> mCmdBufferInfo;
    std::unordered_map<VkCommandPool, CommandPoolInfo> mCmdPoolInfo;
    // TODO: release CommandBufferInfo when a command pool is reset/released

    // Back-references from a VkDevice to its physical device, and from a
    // VkPhysicalDevice to the instance it was enumerated from.
    std::unordered_map<VkDevice, VkPhysicalDevice> mDeviceToPhysicalDevice;
    std::unordered_map<VkPhysicalDevice, VkInstance> mPhysicalDeviceToInstance;

    std::unordered_map<VkQueue, QueueInfo> mQueueInfo;
    std::unordered_map<VkBuffer, BufferInfo> mBufferInfo;

    std::unordered_map<VkDeviceMemory, MemoryInfo> mMemoryInfo;

    std::unordered_map<VkShaderModule, ShaderModuleInfo> mShaderModuleInfo;
    std::unordered_map<VkPipelineCache, PipelineCacheInfo> mPipelineCacheInfo;
    std::unordered_map<VkPipeline, PipelineInfo> mPipelineInfo;
    std::unordered_map<VkRenderPass, RenderPassInfo> mRenderPassInfo;
    std::unordered_map<VkFramebuffer, FramebufferInfo> mFramebufferInfo;

    std::unordered_map<VkSemaphore, SemaphoreInfo> mSemaphoreInfo;
    std::unordered_map<VkFence, FenceInfo> mFenceInfo;

    std::unordered_map<VkDescriptorSetLayout, DescriptorSetLayoutInfo> mDescriptorSetLayoutInfo;
    std::unordered_map<VkDescriptorPool, DescriptorPoolInfo> mDescriptorPoolInfo;
    std::unordered_map<VkDescriptorSet, DescriptorSetInfo> mDescriptorSetInfo;

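    // On Windows, external semaphore handles are tracked by small integer ids handed out below;
    // the counter resets to 1 if it ever reaches -1.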
#ifdef _WIN32
    int mSemaphoreId = 1;
    int genSemaphoreId() {
        if (mSemaphoreId == -1) {
            mSemaphoreId = 1;
        }
        int res = mSemaphoreId;
        ++mSemaphoreId;
        return res;
    }
    std::unordered_map<int, VkSemaphore> mExternalSemaphoresById;
#endif
    std::unordered_map<VkDescriptorUpdateTemplate, DescriptorUpdateTemplateInfo>
        mDescriptorUpdateTemplateInfo;

    VkDecoderSnapshot mSnapshot;

    std::vector<uint64_t> mCreatedHandlesForSnapshotLoad;
    size_t mCreatedHandlesForSnapshotLoadIndex = 0;

    Lock mOccupiedGpasLock;
    // Back-reference to the VkDeviceMemory that is occupying a particular
    // guest physical address
    struct OccupiedGpaInfo {
        VulkanDispatch* vk;
        VkDevice device;
        VkDeviceMemory memory;
        uint64_t gpa;
        size_t sizeToPage;
    };
    std::unordered_map<uint64_t, OccupiedGpaInfo> mOccupiedGpas;

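    // Key describing a linear-tiling image whose layout properties (offset, row pitch alignment)
    // are cached below; hashed with a boost::hash_combine-style mix of its fields.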
    struct LinearImageCreateInfo {
        VkExtent3D extent;
        VkFormat format;
        VkImageUsageFlags usage;

        VkImageCreateInfo toDefaultVk() const {
            return VkImageCreateInfo{
                .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
                .pNext = nullptr,
                .flags = {},
                .imageType = VK_IMAGE_TYPE_2D,
                .format = format,
                .extent = extent,
                .mipLevels = 1,
                .arrayLayers = 1,
                .samples = VK_SAMPLE_COUNT_1_BIT,
                .tiling = VK_IMAGE_TILING_LINEAR,
                .usage = usage,
                .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
                .queueFamilyIndexCount = 0,
                .pQueueFamilyIndices = nullptr,
                .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
            };
        }

        struct Hash {
            std::size_t operator()(const LinearImageCreateInfo& ci) const {
                std::size_t s = 0;
                // Magic number used in boost::hash_combine().
                constexpr size_t kHashMagic = 0x9e3779b9;
                s ^= std::hash<uint32_t>{}(ci.extent.width) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<uint32_t>{}(ci.extent.height) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<uint32_t>{}(ci.extent.depth) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<VkFormat>{}(ci.format) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<VkImageUsageFlags>{}(ci.usage) + kHashMagic + (s << 6) + (s >> 2);
                return s;
            }
        };
    };

    friend bool operator==(const LinearImageCreateInfo& a, const LinearImageCreateInfo& b) {
        return a.extent.width == b.extent.width && a.extent.height == b.extent.height &&
               a.extent.depth == b.extent.depth && a.format == b.format && a.usage == b.usage;
    }

    struct LinearImageProperties {
        VkDeviceSize offset;
        VkDeviceSize rowPitchAlignment;
    };

    // TODO(liyl): Remove after removing the old vkGetLinearImageLayoutGOOGLE.
    std::unordered_map<VkFormat, LinearImageProperties> mPerFormatLinearImageProperties;

    std::unordered_map<LinearImageCreateInfo, LinearImageProperties, LinearImageCreateInfo::Hash>
        mLinearImageProperties;
};

VkDecoderGlobalState::VkDecoderGlobalState() : mImpl(new VkDecoderGlobalState::Impl()) {}

VkDecoderGlobalState::~VkDecoderGlobalState() = default;

static VkDecoderGlobalState* sGlobalDecoderState = nullptr;

// static
VkDecoderGlobalState* VkDecoderGlobalState::get() {
    if (sGlobalDecoderState) return sGlobalDecoderState;
    sGlobalDecoderState = new VkDecoderGlobalState;
    return sGlobalDecoderState;
}

// static
void VkDecoderGlobalState::reset() {
    delete sGlobalDecoderState;
    sGlobalDecoderState = nullptr;
}

// Snapshots
bool VkDecoderGlobalState::snapshotsEnabled() const { return mImpl->snapshotsEnabled(); }

bool VkDecoderGlobalState::vkCleanupEnabled() const { return mImpl->vkCleanupEnabled(); }

void VkDecoderGlobalState::save(android::base::Stream* stream) { mImpl->save(stream); }

void VkDecoderGlobalState::load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
                                HealthMonitor<>* healthMonitor) {
    mImpl->load(stream, gfxLogger, healthMonitor);
}

void VkDecoderGlobalState::lock() { mImpl->lock(); }

void VkDecoderGlobalState::unlock() { mImpl->unlock(); }

size_t VkDecoderGlobalState::setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
    return mImpl->setCreatedHandlesForSnapshotLoad(buffer);
}

void VkDecoderGlobalState::clearCreatedHandlesForSnapshotLoad() {
    mImpl->clearCreatedHandlesForSnapshotLoad();
}

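// The VkDecoderGlobalState entry points below are thin trampolines that forward each on_vk* call
// to the corresponding method on the pimpl (Impl) defined above.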
on_vkEnumerateInstanceVersion(android::base::BumpPool * pool,uint32_t * pApiVersion)6950 VkResult VkDecoderGlobalState::on_vkEnumerateInstanceVersion(android::base::BumpPool* pool,
6951                                                              uint32_t* pApiVersion) {
6952     return mImpl->on_vkEnumerateInstanceVersion(pool, pApiVersion);
6953 }
6954 
on_vkCreateInstance(android::base::BumpPool * pool,const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)6955 VkResult VkDecoderGlobalState::on_vkCreateInstance(android::base::BumpPool* pool,
6956                                                    const VkInstanceCreateInfo* pCreateInfo,
6957                                                    const VkAllocationCallbacks* pAllocator,
6958                                                    VkInstance* pInstance) {
6959     return mImpl->on_vkCreateInstance(pool, pCreateInfo, pAllocator, pInstance);
6960 }
6961 
on_vkDestroyInstance(android::base::BumpPool * pool,VkInstance instance,const VkAllocationCallbacks * pAllocator)6962 void VkDecoderGlobalState::on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance instance,
6963                                                 const VkAllocationCallbacks* pAllocator) {
6964     mImpl->on_vkDestroyInstance(pool, instance, pAllocator);
6965 }
6966 
on_vkEnumeratePhysicalDevices(android::base::BumpPool * pool,VkInstance instance,uint32_t * physicalDeviceCount,VkPhysicalDevice * physicalDevices)6967 VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool,
6968                                                              VkInstance instance,
6969                                                              uint32_t* physicalDeviceCount,
6970                                                              VkPhysicalDevice* physicalDevices) {
6971     return mImpl->on_vkEnumeratePhysicalDevices(pool, instance, physicalDeviceCount,
6972                                                 physicalDevices);
6973 }
6974 
on_vkGetPhysicalDeviceFeatures(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceFeatures * pFeatures)6975 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
6976                                                           VkPhysicalDevice physicalDevice,
6977                                                           VkPhysicalDeviceFeatures* pFeatures) {
6978     mImpl->on_vkGetPhysicalDeviceFeatures(pool, physicalDevice, pFeatures);
6979 }
6980 
on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceFeatures2 * pFeatures)6981 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
6982                                                            VkPhysicalDevice physicalDevice,
6983                                                            VkPhysicalDeviceFeatures2* pFeatures) {
6984     mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
6985 }
6986 
on_vkGetPhysicalDeviceFeatures2KHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceFeatures2KHR * pFeatures)6987 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2KHR(
6988     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6989     VkPhysicalDeviceFeatures2KHR* pFeatures) {
6990     mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
6991 }
6992 
on_vkGetPhysicalDeviceImageFormatProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags,VkImageFormatProperties * pImageFormatProperties)6993 VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties(
6994     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
6995     VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
6996     VkImageFormatProperties* pImageFormatProperties) {
6997     return mImpl->on_vkGetPhysicalDeviceImageFormatProperties(
6998         pool, physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
6999 }
on_vkGetPhysicalDeviceImageFormatProperties2(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)7000 VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2(
7001     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7002     const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
7003     VkImageFormatProperties2* pImageFormatProperties) {
7004     return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
7005         pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
7006 }
on_vkGetPhysicalDeviceImageFormatProperties2KHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)7007 VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
7008     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7009     const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
7010     VkImageFormatProperties2* pImageFormatProperties) {
7011     return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
7012         pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
7013 }
7014 
on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)7015 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties(
7016     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
7017     VkFormatProperties* pFormatProperties) {
7018     mImpl->on_vkGetPhysicalDeviceFormatProperties(pool, physicalDevice, format, pFormatProperties);
7019 }
7020 
on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties2 * pFormatProperties)7021 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2(
7022     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
7023     VkFormatProperties2* pFormatProperties) {
7024     mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
7025 }
7026 
on_vkGetPhysicalDeviceFormatProperties2KHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties2 * pFormatProperties)7027 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2KHR(
7028     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
7029     VkFormatProperties2* pFormatProperties) {
7030     mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
7031 }
7032 
on_vkGetPhysicalDeviceProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties * pProperties)7033 void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties(
7034     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7035     VkPhysicalDeviceProperties* pProperties) {
7036     mImpl->on_vkGetPhysicalDeviceProperties(pool, physicalDevice, pProperties);
7037 }
7038 
on_vkGetPhysicalDeviceProperties2(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties2 * pProperties)7039 void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2(
7040     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7041     VkPhysicalDeviceProperties2* pProperties) {
7042     mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
7043 }
7044 
on_vkGetPhysicalDeviceProperties2KHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties2 * pProperties)7045 void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2KHR(
7046     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7047     VkPhysicalDeviceProperties2* pProperties) {
7048     mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
7049 }
7050 
on_vkGetPhysicalDeviceMemoryProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties * pMemoryProperties)7051 void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties(
7052     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7053     VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
7054     mImpl->on_vkGetPhysicalDeviceMemoryProperties(pool, physicalDevice, pMemoryProperties);
7055 }
7056 
on_vkGetPhysicalDeviceMemoryProperties2(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)7057 void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2(
7058     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7059     VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
7060     mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
7061 }
7062 
on_vkGetPhysicalDeviceMemoryProperties2KHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)7063 void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2KHR(
7064     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7065     VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
7066     mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
7067 }
7068 
on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)7069 VkResult VkDecoderGlobalState::on_vkEnumerateDeviceExtensionProperties(
7070     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, const char* pLayerName,
7071     uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {
7072     return mImpl->on_vkEnumerateDeviceExtensionProperties(pool, physicalDevice, pLayerName,
7073                                                           pPropertyCount, pProperties);
7074 }
7075 
on_vkCreateDevice(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)7076 VkResult VkDecoderGlobalState::on_vkCreateDevice(android::base::BumpPool* pool,
7077                                                  VkPhysicalDevice physicalDevice,
7078                                                  const VkDeviceCreateInfo* pCreateInfo,
7079                                                  const VkAllocationCallbacks* pAllocator,
7080                                                  VkDevice* pDevice) {
7081     return mImpl->on_vkCreateDevice(pool, physicalDevice, pCreateInfo, pAllocator, pDevice);
7082 }
7083 
on_vkGetDeviceQueue(android::base::BumpPool * pool,VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)7084 void VkDecoderGlobalState::on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice device,
7085                                                uint32_t queueFamilyIndex, uint32_t queueIndex,
7086                                                VkQueue* pQueue) {
7087     mImpl->on_vkGetDeviceQueue(pool, device, queueFamilyIndex, queueIndex, pQueue);
7088 }
7089 
on_vkGetDeviceQueue2(android::base::BumpPool * pool,VkDevice device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)7090 void VkDecoderGlobalState::on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice device,
7091                                                 const VkDeviceQueueInfo2* pQueueInfo,
7092                                                 VkQueue* pQueue) {
7093     mImpl->on_vkGetDeviceQueue2(pool, device, pQueueInfo, pQueue);
7094 }
7095 
on_vkDestroyDevice(android::base::BumpPool * pool,VkDevice device,const VkAllocationCallbacks * pAllocator)7096 void VkDecoderGlobalState::on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice device,
7097                                               const VkAllocationCallbacks* pAllocator) {
7098     mImpl->on_vkDestroyDevice(pool, device, pAllocator);
7099 }
7100 
on_vkCreateBuffer(android::base::BumpPool * pool,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)7101 VkResult VkDecoderGlobalState::on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice device,
7102                                                  const VkBufferCreateInfo* pCreateInfo,
7103                                                  const VkAllocationCallbacks* pAllocator,
7104                                                  VkBuffer* pBuffer) {
7105     return mImpl->on_vkCreateBuffer(pool, device, pCreateInfo, pAllocator, pBuffer);
7106 }
7107 
on_vkDestroyBuffer(android::base::BumpPool * pool,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)7108 void VkDecoderGlobalState::on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice device,
7109                                               VkBuffer buffer,
7110                                               const VkAllocationCallbacks* pAllocator) {
7111     mImpl->on_vkDestroyBuffer(pool, device, buffer, pAllocator);
7112 }
7113 
on_vkBindBufferMemory(android::base::BumpPool * pool,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)7114 VkResult VkDecoderGlobalState::on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice device,
7115                                                      VkBuffer buffer, VkDeviceMemory memory,
7116                                                      VkDeviceSize memoryOffset) {
7117     return mImpl->on_vkBindBufferMemory(pool, device, buffer, memory, memoryOffset);
7118 }
7119 
on_vkBindBufferMemory2(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)7120 VkResult VkDecoderGlobalState::on_vkBindBufferMemory2(android::base::BumpPool* pool,
7121                                                       VkDevice device, uint32_t bindInfoCount,
7122                                                       const VkBindBufferMemoryInfo* pBindInfos) {
7123     return mImpl->on_vkBindBufferMemory2(pool, device, bindInfoCount, pBindInfos);
7124 }
7125 
on_vkBindBufferMemory2KHR(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)7126 VkResult VkDecoderGlobalState::on_vkBindBufferMemory2KHR(android::base::BumpPool* pool,
7127                                                          VkDevice device, uint32_t bindInfoCount,
7128                                                          const VkBindBufferMemoryInfo* pBindInfos) {
7129     return mImpl->on_vkBindBufferMemory2KHR(pool, device, bindInfoCount, pBindInfos);
7130 }
7131 
on_vkCreateImage(android::base::BumpPool * pool,VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)7132 VkResult VkDecoderGlobalState::on_vkCreateImage(android::base::BumpPool* pool, VkDevice device,
7133                                                 const VkImageCreateInfo* pCreateInfo,
7134                                                 const VkAllocationCallbacks* pAllocator,
7135                                                 VkImage* pImage) {
7136     return mImpl->on_vkCreateImage(pool, device, pCreateInfo, pAllocator, pImage);
7137 }
7138 
on_vkDestroyImage(android::base::BumpPool * pool,VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)7139 void VkDecoderGlobalState::on_vkDestroyImage(android::base::BumpPool* pool, VkDevice device,
7140                                              VkImage image,
7141                                              const VkAllocationCallbacks* pAllocator) {
7142     mImpl->on_vkDestroyImage(pool, device, image, pAllocator);
7143 }
7144 
on_vkBindImageMemory(android::base::BumpPool * pool,VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)7145 VkResult VkDecoderGlobalState::on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice device,
7146                                                     VkImage image, VkDeviceMemory memory,
7147                                                     VkDeviceSize memoryOffset) {
7148     return mImpl->on_vkBindImageMemory(pool, device, image, memory, memoryOffset);
7149 }
7150 
on_vkBindImageMemory2(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos)7151 VkResult VkDecoderGlobalState::on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice device,
7152                                                      uint32_t bindInfoCount,
7153                                                      const VkBindImageMemoryInfo* pBindInfos) {
7154     return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
7155 }
7156 
VkResult VkDecoderGlobalState::on_vkBindImageMemory2KHR(android::base::BumpPool* pool,
                                                         VkDevice device, uint32_t bindInfoCount,
                                                         const VkBindImageMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
}

VkResult VkDecoderGlobalState::on_vkCreateImageView(android::base::BumpPool* pool, VkDevice device,
                                                     const VkImageViewCreateInfo* pCreateInfo,
                                                     const VkAllocationCallbacks* pAllocator,
                                                     VkImageView* pView) {
    return mImpl->on_vkCreateImageView(pool, device, pCreateInfo, pAllocator, pView);
}

void VkDecoderGlobalState::on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice device,
                                                  VkImageView imageView,
                                                  const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyImageView(pool, device, imageView, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateSampler(android::base::BumpPool* pool, VkDevice device,
                                                   const VkSamplerCreateInfo* pCreateInfo,
                                                   const VkAllocationCallbacks* pAllocator,
                                                   VkSampler* pSampler) {
    return mImpl->on_vkCreateSampler(pool, device, pCreateInfo, pAllocator, pSampler);
}

void VkDecoderGlobalState::on_vkDestroySampler(android::base::BumpPool* pool, VkDevice device,
                                                VkSampler sampler,
                                                const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySampler(pool, device, sampler, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice device,
                                                     const VkSemaphoreCreateInfo* pCreateInfo,
                                                     const VkAllocationCallbacks* pAllocator,
                                                     VkSemaphore* pSemaphore) {
    return mImpl->on_vkCreateSemaphore(pool, device, pCreateInfo, pAllocator, pSemaphore);
}

VkResult VkDecoderGlobalState::on_vkImportSemaphoreFdKHR(
    android::base::BumpPool* pool, VkDevice device,
    const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
    return mImpl->on_vkImportSemaphoreFdKHR(pool, device, pImportSemaphoreFdInfo);
}

VkResult VkDecoderGlobalState::on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool,
                                                       VkDevice device,
                                                       const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
                                                       int* pFd) {
    return mImpl->on_vkGetSemaphoreFdKHR(pool, device, pGetFdInfo, pFd);
}

void VkDecoderGlobalState::on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice device,
                                                  VkSemaphore semaphore,
                                                  const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySemaphore(pool, device, semaphore, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateFence(android::base::BumpPool* pool, VkDevice device,
                                                 const VkFenceCreateInfo* pCreateInfo,
                                                 const VkAllocationCallbacks* pAllocator,
                                                 VkFence* pFence) {
    return mImpl->on_vkCreateFence(pool, device, pCreateInfo, pAllocator, pFence);
}

VkResult VkDecoderGlobalState::on_vkResetFences(android::base::BumpPool* pool, VkDevice device,
                                                 uint32_t fenceCount, const VkFence* pFences) {
    return mImpl->on_vkResetFences(pool, device, fenceCount, pFences);
}

void VkDecoderGlobalState::on_vkDestroyFence(android::base::BumpPool* pool, VkDevice device,
                                              VkFence fence,
                                              const VkAllocationCallbacks* pAllocator) {
    return mImpl->on_vkDestroyFence(pool, device, fence, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateDescriptorSetLayout(
    android::base::BumpPool* pool, VkDevice device,
    const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkDescriptorSetLayout* pSetLayout) {
    return mImpl->on_vkCreateDescriptorSetLayout(pool, device, pCreateInfo, pAllocator, pSetLayout);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorSetLayout(
    android::base::BumpPool* pool, VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorSetLayout(pool, device, descriptorSetLayout, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateDescriptorPool(
    android::base::BumpPool* pool, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {
    return mImpl->on_vkCreateDescriptorPool(pool, device, pCreateInfo, pAllocator, pDescriptorPool);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorPool(android::base::BumpPool* pool,
                                                       VkDevice device,
                                                       VkDescriptorPool descriptorPool,
                                                       const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorPool(pool, device, descriptorPool, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkResetDescriptorPool(android::base::BumpPool* pool,
                                                         VkDevice device,
                                                         VkDescriptorPool descriptorPool,
                                                         VkDescriptorPoolResetFlags flags) {
    return mImpl->on_vkResetDescriptorPool(pool, device, descriptorPool, flags);
}

VkResult VkDecoderGlobalState::on_vkAllocateDescriptorSets(
    android::base::BumpPool* pool, VkDevice device,
    const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {
    return mImpl->on_vkAllocateDescriptorSets(pool, device, pAllocateInfo, pDescriptorSets);
}

VkResult VkDecoderGlobalState::on_vkFreeDescriptorSets(android::base::BumpPool* pool,
                                                        VkDevice device,
                                                        VkDescriptorPool descriptorPool,
                                                        uint32_t descriptorSetCount,
                                                        const VkDescriptorSet* pDescriptorSets) {
    return mImpl->on_vkFreeDescriptorSets(pool, device, descriptorPool, descriptorSetCount,
                                          pDescriptorSets);
}

void VkDecoderGlobalState::on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
                                                      uint32_t descriptorWriteCount,
                                                      const VkWriteDescriptorSet* pDescriptorWrites,
                                                      uint32_t descriptorCopyCount,
                                                      const VkCopyDescriptorSet* pDescriptorCopies) {
    mImpl->on_vkUpdateDescriptorSets(pool, device, descriptorWriteCount, pDescriptorWrites,
                                     descriptorCopyCount, pDescriptorCopies);
}

VkResult VkDecoderGlobalState::on_vkCreateShaderModule(android::base::BumpPool* pool,
                                                        VkDevice boxed_device,
                                                        const VkShaderModuleCreateInfo* pCreateInfo,
                                                        const VkAllocationCallbacks* pAllocator,
                                                        VkShaderModule* pShaderModule) {
    return mImpl->on_vkCreateShaderModule(pool, boxed_device, pCreateInfo, pAllocator,
                                          pShaderModule);
}

void VkDecoderGlobalState::on_vkDestroyShaderModule(android::base::BumpPool* pool,
                                                     VkDevice boxed_device,
                                                     VkShaderModule shaderModule,
                                                     const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyShaderModule(pool, boxed_device, shaderModule, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreatePipelineCache(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkPipelineCache* pPipelineCache) {
    return mImpl->on_vkCreatePipelineCache(pool, boxed_device, pCreateInfo, pAllocator,
                                           pPipelineCache);
}

void VkDecoderGlobalState::on_vkDestroyPipelineCache(android::base::BumpPool* pool,
                                                      VkDevice boxed_device,
                                                      VkPipelineCache pipelineCache,
                                                      const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyPipelineCache(pool, boxed_device, pipelineCache, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateGraphicsPipelines(
    android::base::BumpPool* pool, VkDevice boxed_device, VkPipelineCache pipelineCache,
    uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
    return mImpl->on_vkCreateGraphicsPipelines(pool, boxed_device, pipelineCache, createInfoCount,
                                               pCreateInfos, pAllocator, pPipelines);
}

void VkDecoderGlobalState::on_vkDestroyPipeline(android::base::BumpPool* pool,
                                                 VkDevice boxed_device, VkPipeline pipeline,
                                                 const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyPipeline(pool, boxed_device, pipeline, pAllocator);
}

void VkDecoderGlobalState::on_vkCmdCopyBufferToImage(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
    VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
    const VkBufferImageCopy* pRegions, const VkDecoderContext& context) {
    mImpl->on_vkCmdCopyBufferToImage(pool, commandBuffer, srcBuffer, dstImage, dstImageLayout,
                                     regionCount, pRegions, context);
}

void VkDecoderGlobalState::on_vkCmdCopyImage(android::base::BumpPool* pool,
                                              VkCommandBuffer commandBuffer, VkImage srcImage,
                                              VkImageLayout srcImageLayout, VkImage dstImage,
                                              VkImageLayout dstImageLayout, uint32_t regionCount,
                                              const VkImageCopy* pRegions) {
    mImpl->on_vkCmdCopyImage(pool, commandBuffer, srcImage, srcImageLayout, dstImage,
                             dstImageLayout, regionCount, pRegions);
}

void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
                                                      VkCommandBuffer commandBuffer,
                                                      VkImage srcImage, VkImageLayout srcImageLayout,
                                                      VkBuffer dstBuffer, uint32_t regionCount,
                                                      const VkBufferImageCopy* pRegions) {
    mImpl->on_vkCmdCopyImageToBuffer(pool, commandBuffer, srcImage, srcImageLayout, dstBuffer,
                                     regionCount, pRegions);
}

void VkDecoderGlobalState::on_vkCmdCopyBufferToImage2(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo, const VkDecoderContext& context) {
    mImpl->on_vkCmdCopyBufferToImage2(pool, commandBuffer, pCopyBufferToImageInfo, context);
}

void VkDecoderGlobalState::on_vkCmdCopyImage2(android::base::BumpPool* pool,
                                               VkCommandBuffer commandBuffer,
                                               const VkCopyImageInfo2* pCopyImageInfo) {
    mImpl->on_vkCmdCopyImage2(pool, commandBuffer, pCopyImageInfo);
}

void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer2(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) {
    mImpl->on_vkCmdCopyImageToBuffer2(pool, commandBuffer, pCopyImageToBufferInfo);
}

void VkDecoderGlobalState::on_vkCmdCopyBufferToImage2KHR(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, const VkDecoderContext& context) {
    mImpl->on_vkCmdCopyBufferToImage2KHR(pool, commandBuffer, pCopyBufferToImageInfo, context);
}

void VkDecoderGlobalState::on_vkCmdCopyImage2KHR(android::base::BumpPool* pool,
                                                  VkCommandBuffer commandBuffer,
                                                  const VkCopyImageInfo2KHR* pCopyImageInfo) {
    mImpl->on_vkCmdCopyImage2KHR(pool, commandBuffer, pCopyImageInfo);
}

void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer2KHR(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) {
    mImpl->on_vkCmdCopyImageToBuffer2KHR(pool, commandBuffer, pCopyImageToBufferInfo);
}

void VkDecoderGlobalState::on_vkGetImageMemoryRequirements(
    android::base::BumpPool* pool, VkDevice device, VkImage image,
    VkMemoryRequirements* pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements(pool, device, image, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2(
    android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2KHR(
    android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements(
    android::base::BumpPool* pool, VkDevice device, VkBuffer buffer,
    VkMemoryRequirements* pMemoryRequirements) {
    mImpl->on_vkGetBufferMemoryRequirements(pool, device, buffer, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2(
    android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2KHR(
    android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkCmdPipelineBarrier(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
    VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
    uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
    uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
    uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
    mImpl->on_vkCmdPipelineBarrier(pool, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
                                   memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                   pBufferMemoryBarriers, imageMemoryBarrierCount,
                                   pImageMemoryBarriers);
}

VkResult VkDecoderGlobalState::on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice device,
                                                    const VkMemoryAllocateInfo* pAllocateInfo,
                                                    const VkAllocationCallbacks* pAllocator,
                                                    VkDeviceMemory* pMemory) {
    return mImpl->on_vkAllocateMemory(pool, device, pAllocateInfo, pAllocator, pMemory);
}

void VkDecoderGlobalState::on_vkFreeMemory(android::base::BumpPool* pool, VkDevice device,
                                            VkDeviceMemory memory,
                                            const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkFreeMemory(pool, device, memory, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkMapMemory(android::base::BumpPool* pool, VkDevice device,
                                               VkDeviceMemory memory, VkDeviceSize offset,
                                               VkDeviceSize size, VkMemoryMapFlags flags,
                                               void** ppData) {
    return mImpl->on_vkMapMemory(pool, device, memory, offset, size, flags, ppData);
}

void VkDecoderGlobalState::on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice device,
                                             VkDeviceMemory memory) {
    mImpl->on_vkUnmapMemory(pool, device, memory);
}

uint8_t* VkDecoderGlobalState::getMappedHostPointer(VkDeviceMemory memory) {
    return mImpl->getMappedHostPointer(memory);
}

VkDeviceSize VkDecoderGlobalState::getDeviceMemorySize(VkDeviceMemory memory) {
    return mImpl->getDeviceMemorySize(memory);
}

bool VkDecoderGlobalState::usingDirectMapping() const { return mImpl->usingDirectMapping(); }

VkDecoderGlobalState::HostFeatureSupport VkDecoderGlobalState::getHostFeatureSupport() const {
    return mImpl->getHostFeatureSupport();
}

// VK_ANDROID_native_buffer
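// These entry points back the Android swapchain path: gralloc usage queries plus the
// acquire/signal-release hooks that exchange native fence FDs with the guest.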
VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool,
                                                                     VkDevice device,
                                                                     VkFormat format,
                                                                     VkImageUsageFlags imageUsage,
                                                                     int* grallocUsage) {
    return mImpl->on_vkGetSwapchainGrallocUsageANDROID(pool, device, format, imageUsage,
                                                       grallocUsage);
}

VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsage2ANDROID(
    android::base::BumpPool* pool, VkDevice device, VkFormat format, VkImageUsageFlags imageUsage,
    VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
    uint64_t* grallocProducerUsage) {
    return mImpl->on_vkGetSwapchainGrallocUsage2ANDROID(pool, device, format, imageUsage,
                                                        swapchainImageUsage, grallocConsumerUsage,
                                                        grallocProducerUsage);
}

VkResult VkDecoderGlobalState::on_vkAcquireImageANDROID(android::base::BumpPool* pool,
                                                         VkDevice device, VkImage image,
                                                         int nativeFenceFd, VkSemaphore semaphore,
                                                         VkFence fence) {
    return mImpl->on_vkAcquireImageANDROID(pool, device, image, nativeFenceFd, semaphore, fence);
}

VkResult VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROID(
    android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) {
    return mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount,
                                                      pWaitSemaphores, image, pNativeFenceFd);
}

// VK_GOOGLE_gfxstream
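// Gfxstream-specific entry points, largely for exposing host memory mappings (addresses, sizes,
// hostmem/blob IDs) directly to the guest.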
VkResult VkDecoderGlobalState::on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
                                                                     VkDevice device,
                                                                     VkDeviceMemory memory,
                                                                     uint64_t* pAddress) {
    return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(pool, device, memory, pAddress);
}

VkResult VkDecoderGlobalState::on_vkGetMemoryHostAddressInfoGOOGLE(
    android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory, uint64_t* pAddress,
    uint64_t* pSize, uint64_t* pHostmemId) {
    return mImpl->on_vkGetMemoryHostAddressInfoGOOGLE(pool, device, memory, pAddress, pSize,
                                                      pHostmemId);
}

VkResult VkDecoderGlobalState::on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice device,
                                                   VkDeviceMemory memory) {
    return mImpl->on_vkGetBlobGOOGLE(pool, device, memory);
}

VkResult VkDecoderGlobalState::on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool,
                                                          VkDevice device, VkDeviceMemory memory,
                                                          const VkAllocationCallbacks* pAllocator) {
    return mImpl->on_vkFreeMemorySyncGOOGLE(pool, device, memory, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkAllocateCommandBuffers(
    android::base::BumpPool* pool, VkDevice device,
    const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {
    return mImpl->on_vkAllocateCommandBuffers(pool, device, pAllocateInfo, pCommandBuffers);
}

VkResult VkDecoderGlobalState::on_vkCreateCommandPool(android::base::BumpPool* pool,
                                                       VkDevice device,
                                                       const VkCommandPoolCreateInfo* pCreateInfo,
                                                       const VkAllocationCallbacks* pAllocator,
                                                       VkCommandPool* pCommandPool) {
    return mImpl->on_vkCreateCommandPool(pool, device, pCreateInfo, pAllocator, pCommandPool);
}

void VkDecoderGlobalState::on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice device,
                                                    VkCommandPool commandPool,
                                                    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyCommandPool(pool, device, commandPool, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice device,
                                                      VkCommandPool commandPool,
                                                      VkCommandPoolResetFlags flags) {
    return mImpl->on_vkResetCommandPool(pool, device, commandPool, flags);
}

void VkDecoderGlobalState::on_vkCmdExecuteCommands(android::base::BumpPool* pool,
                                                    VkCommandBuffer commandBuffer,
                                                    uint32_t commandBufferCount,
                                                    const VkCommandBuffer* pCommandBuffers) {
    return mImpl->on_vkCmdExecuteCommands(pool, commandBuffer, commandBufferCount, pCommandBuffers);
}

VkResult VkDecoderGlobalState::on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue queue,
                                                 uint32_t submitCount, const VkSubmitInfo* pSubmits,
                                                 VkFence fence) {
    return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}

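// vkQueueSubmit2 funnels into the same mImpl->on_vkQueueSubmit handler as vkQueueSubmit; the impl
// presumably accepts both VkSubmitInfo and VkSubmitInfo2 (e.g. via an overload or template).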
VkResult VkDecoderGlobalState::on_vkQueueSubmit2(android::base::BumpPool* pool, VkQueue queue,
                                                  uint32_t submitCount,
                                                  const VkSubmitInfo2* pSubmits, VkFence fence) {
    return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}

VkResult VkDecoderGlobalState::on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue queue) {
    return mImpl->on_vkQueueWaitIdle(pool, queue);
}

VkResult VkDecoderGlobalState::on_vkResetCommandBuffer(android::base::BumpPool* pool,
                                                        VkCommandBuffer commandBuffer,
                                                        VkCommandBufferResetFlags flags) {
    return mImpl->on_vkResetCommandBuffer(pool, commandBuffer, flags);
}

void VkDecoderGlobalState::on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice device,
                                                    VkCommandPool commandPool,
                                                    uint32_t commandBufferCount,
                                                    const VkCommandBuffer* pCommandBuffers) {
    return mImpl->on_vkFreeCommandBuffers(pool, device, commandPool, commandBufferCount,
                                          pCommandBuffers);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphoreProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
    VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
    return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
        pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
    VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
    return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
        pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}

// Descriptor update templates
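// Unlike the bind and memory-requirements KHR aliases above, the descriptor update template KHR
// variants keep their own mImpl hooks instead of forwarding to the core handlers.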
VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplate(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplate(pool, boxed_device, pCreateInfo, pAllocator,
                                                      pDescriptorUpdateTemplate);
}

VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplateKHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(pool, boxed_device, pCreateInfo,
                                                         pAllocator, pDescriptorUpdateTemplate);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplate(
    android::base::BumpPool* pool, VkDevice boxed_device,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorUpdateTemplate(pool, boxed_device, descriptorUpdateTemplate,
                                                pAllocator);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplateKHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorUpdateTemplateKHR(pool, boxed_device, descriptorUpdateTemplate,
                                                   pAllocator);
}

void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
    android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
    uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
    const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
    const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
    const VkBufferView* pBufferViews) {
    mImpl->on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
        pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
        bufferInfoCount, bufferViewCount, pImageInfoEntryIndices, pBufferInfoEntryIndices,
        pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews);
}

void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
    android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
    uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
    const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
    const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
    const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
    const uint8_t* pInlineUniformBlockData) {
    mImpl->on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
        pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
        bufferInfoCount, bufferViewCount, inlineUniformBlockCount, pImageInfoEntryIndices,
        pBufferInfoEntryIndices, pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews,
        pInlineUniformBlockData);
}

VkResult VkDecoderGlobalState::on_vkBeginCommandBuffer(android::base::BumpPool* pool,
                                                        VkCommandBuffer commandBuffer,
                                                        const VkCommandBufferBeginInfo* pBeginInfo,
                                                        const VkDecoderContext& context) {
    return mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
}

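// The *AsyncGOOGLE variants return void; where they forward to a synchronous handler (as with
// on_vkBeginCommandBuffer just below), any VkResult from the impl is dropped rather than reported
// back to the guest.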
void VkDecoderGlobalState::on_vkBeginCommandBufferAsyncGOOGLE(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    const VkCommandBufferBeginInfo* pBeginInfo, const VkDecoderContext& context) {
    mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
}

VkResult VkDecoderGlobalState::on_vkEndCommandBuffer(android::base::BumpPool* pool,
                                                      VkCommandBuffer commandBuffer,
                                                      const VkDecoderContext& context) {
    return mImpl->on_vkEndCommandBuffer(pool, commandBuffer, context);
}

void VkDecoderGlobalState::on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                                             VkCommandBuffer commandBuffer,
                                                             const VkDecoderContext& context) {
    mImpl->on_vkEndCommandBufferAsyncGOOGLE(pool, commandBuffer, context);
}

void VkDecoderGlobalState::on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                                               VkCommandBuffer commandBuffer,
                                                               VkCommandBufferResetFlags flags) {
    mImpl->on_vkResetCommandBufferAsyncGOOGLE(pool, commandBuffer, flags);
}

void VkDecoderGlobalState::on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
                                                             VkCommandBuffer commandBuffer,
                                                             uint32_t needHostSync,
                                                             uint32_t sequenceNumber) {
    mImpl->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
}

VkResult VkDecoderGlobalState::on_vkCreateImageWithRequirementsGOOGLE(
    android::base::BumpPool* pool, VkDevice device, const VkImageCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkImage* pImage,
    VkMemoryRequirements* pMemoryRequirements) {
    return mImpl->on_vkCreateImageWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
                                                         pImage, pMemoryRequirements);
}

VkResult VkDecoderGlobalState::on_vkCreateBufferWithRequirementsGOOGLE(
    android::base::BumpPool* pool, VkDevice device, const VkBufferCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer,
    VkMemoryRequirements* pMemoryRequirements) {
    return mImpl->on_vkCreateBufferWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
                                                          pBuffer, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkCmdBindPipeline(android::base::BumpPool* pool,
                                                 VkCommandBuffer commandBuffer,
                                                 VkPipelineBindPoint pipelineBindPoint,
                                                 VkPipeline pipeline) {
    mImpl->on_vkCmdBindPipeline(pool, commandBuffer, pipelineBindPoint, pipeline);
}

void VkDecoderGlobalState::on_vkCmdBindDescriptorSets(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet,
    uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets,
    uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
    mImpl->on_vkCmdBindDescriptorSets(pool, commandBuffer, pipelineBindPoint, layout, firstSet,
                                      descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
                                      pDynamicOffsets);
}

VkResult VkDecoderGlobalState::on_vkCreateRenderPass(android::base::BumpPool* pool,
                                                      VkDevice boxed_device,
                                                      const VkRenderPassCreateInfo* pCreateInfo,
                                                      const VkAllocationCallbacks* pAllocator,
                                                      VkRenderPass* pRenderPass) {
    return mImpl->on_vkCreateRenderPass(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}

VkResult VkDecoderGlobalState::on_vkCreateRenderPass2(android::base::BumpPool* pool,
                                                       VkDevice boxed_device,
                                                       const VkRenderPassCreateInfo2* pCreateInfo,
                                                       const VkAllocationCallbacks* pAllocator,
                                                       VkRenderPass* pRenderPass) {
    return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}

VkResult VkDecoderGlobalState::on_vkCreateRenderPass2KHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkRenderPass* pRenderPass) {
    return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}

void VkDecoderGlobalState::on_vkDestroyRenderPass(android::base::BumpPool* pool,
                                                   VkDevice boxed_device, VkRenderPass renderPass,
                                                   const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyRenderPass(pool, boxed_device, renderPass, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateFramebuffer(android::base::BumpPool* pool,
                                                       VkDevice boxed_device,
                                                       const VkFramebufferCreateInfo* pCreateInfo,
                                                       const VkAllocationCallbacks* pAllocator,
                                                       VkFramebuffer* pFramebuffer) {
    return mImpl->on_vkCreateFramebuffer(pool, boxed_device, pCreateInfo, pAllocator, pFramebuffer);
}

void VkDecoderGlobalState::on_vkDestroyFramebuffer(android::base::BumpPool* pool,
                                                    VkDevice boxed_device, VkFramebuffer framebuffer,
                                                    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyFramebuffer(pool, boxed_device, framebuffer, pAllocator);
}

on_vkQueueHostSyncGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t needHostSync,uint32_t sequenceNumber)7798 void VkDecoderGlobalState::on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
7799                                                     uint32_t needHostSync,
7800                                                     uint32_t sequenceNumber) {
7801     mImpl->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
7802 }
7803 
on_vkCmdCopyQueryPoolResults(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)7804 void VkDecoderGlobalState::on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
7805                                                         VkCommandBuffer commandBuffer,
7806                                                         VkQueryPool queryPool, uint32_t firstQuery,
7807                                                         uint32_t queryCount, VkBuffer dstBuffer,
7808                                                         VkDeviceSize dstOffset, VkDeviceSize stride,
7809                                                         VkQueryResultFlags flags) {
7810     mImpl->on_vkCmdCopyQueryPoolResults(pool, commandBuffer, queryPool, firstQuery, queryCount,
7811                                         dstBuffer, dstOffset, stride, flags);
7812 }
7813 
void VkDecoderGlobalState::on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
                                                       uint32_t submitCount,
                                                       const VkSubmitInfo* pSubmits,
                                                       VkFence fence) {
    mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}

void VkDecoderGlobalState::on_vkQueueSubmitAsync2GOOGLE(android::base::BumpPool* pool,
                                                        VkQueue queue, uint32_t submitCount,
                                                        const VkSubmitInfo2* pSubmits,
                                                        VkFence fence) {
    mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}

void VkDecoderGlobalState::on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool* pool,
                                                         VkQueue queue) {
    mImpl->on_vkQueueWaitIdle(pool, queue);
}

void VkDecoderGlobalState::on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool* pool,
                                                           VkQueue queue, uint32_t bindInfoCount,
                                                           const VkBindSparseInfo* pBindInfo,
                                                           VkFence fence) {
    mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}

void VkDecoderGlobalState::on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool,
                                                           VkDevice device, VkFormat format,
                                                           VkDeviceSize* pOffset,
                                                           VkDeviceSize* pRowPitchAlignment) {
    mImpl->on_vkGetLinearImageLayoutGOOGLE(pool, device, format, pOffset, pRowPitchAlignment);
}

void VkDecoderGlobalState::on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool,
                                                            VkDevice device,
                                                            const VkImageCreateInfo* pCreateInfo,
                                                            VkDeviceSize* pOffset,
                                                            VkDeviceSize* pRowPitchAlignment) {
    mImpl->on_vkGetLinearImageLayout2GOOGLE(pool, device, pCreateInfo, pOffset, pRowPitchAlignment);
}

void VkDecoderGlobalState::on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool,
                                                         VkQueue queue,
                                                         VkCommandBuffer commandBuffer,
                                                         VkDeviceSize dataSize, const void* pData,
                                                         const VkDecoderContext& context) {
    mImpl->on_vkQueueFlushCommandsGOOGLE(pool, queue, commandBuffer, dataSize, pData, context);
}

void VkDecoderGlobalState::on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, VkCommandBuffer commandBuffer,
    VkDeviceMemory deviceMemory, VkDeviceSize dataOffset, VkDeviceSize dataSize,
    const VkDecoderContext& context) {
    mImpl->on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(pool, queue, commandBuffer, deviceMemory,
                                                      dataOffset, dataSize, context);
}

void VkDecoderGlobalState::on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, uint32_t descriptorPoolCount,
    const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
    const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
    const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
    const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
    const VkWriteDescriptorSet* pPendingDescriptorWrites) {
    mImpl->on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
        pool, queue, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
        pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
        pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
        pendingDescriptorWriteCount, pPendingDescriptorWrites);
}

void VkDecoderGlobalState::on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool,
                                                               VkDevice device,
                                                               VkDescriptorPool descriptorPool,
                                                               uint32_t* pPoolIdCount,
                                                               uint64_t* pPoolIds) {
    mImpl->on_vkCollectDescriptorPoolIdsGOOGLE(pool, device, descriptorPool, pPoolIdCount,
                                               pPoolIds);
}

VkResult VkDecoderGlobalState::on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue queue,
                                                    uint32_t bindInfoCount,
                                                    const VkBindSparseInfo* pBindInfo,
                                                    VkFence fence) {
    return mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}

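// Async variant of vkQueueSignalReleaseImageANDROID: the fence fd produced by the
// underlying call is received into a local and then discarded, not returned to the caller.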
void VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores, VkImage image) {
    int fenceFd;
    mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount, pWaitSemaphores,
                                               image, &fenceFd);
}

VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversion(
    android::base::BumpPool* pool, VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
                                                    pYcbcrConversion);
}

VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversionKHR(
    android::base::BumpPool* pool, VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
                                                    pYcbcrConversion);
}

void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversion(
    android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
}

void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversionKHR(
    android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
}

void VkDecoderGlobalState::on_DeviceLost() { mImpl->on_DeviceLost(); }

void VkDecoderGlobalState::DeviceLostHandler() { mImpl->DeviceLostHandler(); }

void VkDecoderGlobalState::on_CheckOutOfMemory(VkResult result, uint32_t opCode,
                                               const VkDecoderContext& context,
                                               std::optional<uint64_t> allocationSize) {
    mImpl->on_CheckOutOfMemory(result, opCode, context, allocationSize);
}

VkResult VkDecoderGlobalState::waitForFence(VkFence boxed_fence, uint64_t timeout) {
    return mImpl->waitForFence(boxed_fence, timeout);
}

VkResult VkDecoderGlobalState::getFenceStatus(VkFence boxed_fence) {
    return mImpl->getFenceStatus(boxed_fence);
}

AsyncResult VkDecoderGlobalState::registerQsriCallback(VkImage image,
                                                       VkQsriTimeline::Callback callback) {
    return mImpl->registerQsriCallback(image, std::move(callback));
}

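// Device memory transforms are currently no-ops at this level; the parameters are only
// consumed to keep the interface satisfied.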
void VkDecoderGlobalState::deviceMemoryTransform_tohost(VkDeviceMemory* memory,
                                                        uint32_t memoryCount, VkDeviceSize* offset,
                                                        uint32_t offsetCount, VkDeviceSize* size,
                                                        uint32_t sizeCount, uint32_t* typeIndex,
                                                        uint32_t typeIndexCount, uint32_t* typeBits,
                                                        uint32_t typeBitsCount) {
    // Not used currently
    (void)memory;
    (void)memoryCount;
    (void)offset;
    (void)offsetCount;
    (void)size;
    (void)sizeCount;
    (void)typeIndex;
    (void)typeIndexCount;
    (void)typeBits;
    (void)typeBitsCount;
}

void VkDecoderGlobalState::deviceMemoryTransform_fromhost(
    VkDeviceMemory* memory, uint32_t memoryCount, VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    // Not used currently
    (void)memory;
    (void)memoryCount;
    (void)offset;
    (void)offsetCount;
    (void)size;
    (void)sizeCount;
    (void)typeIndex;
    (void)typeIndexCount;
    (void)typeBits;
    (void)typeBitsCount;
}

VkDecoderSnapshot* VkDecoderGlobalState::snapshot() { return mImpl->snapshot(); }

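// Forwarding stubs for struct transforms. For each type in LIST_TRANSFORMED_TYPES,
// DEFINE_TRANSFORMED_TYPE_IMPL expands to a tohost/fromhost pair that delegates to mImpl,
// e.g. (illustrative expansion for a placeholder type T):
//
//   void VkDecoderGlobalState::transformImpl_T_tohost(const T* val, uint32_t count) {
//       mImpl->transformImpl_T_tohost(val, count);
//   }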
#define DEFINE_TRANSFORMED_TYPE_IMPL(type)                                                        \
    void VkDecoderGlobalState::transformImpl_##type##_tohost(const type* val, uint32_t count) {   \
        mImpl->transformImpl_##type##_tohost(val, count);                                         \
    }                                                                                             \
    void VkDecoderGlobalState::transformImpl_##type##_fromhost(const type* val, uint32_t count) { \
        mImpl->transformImpl_##type##_fromhost(val, count);                                       \
    }

LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)

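// Member-function API for boxed handles. The two macros below generate, per handle type,
// the new/delete/unbox/unboxed_to_boxed (and, for dispatchables, dispatch) entry points on
// VkDecoderGlobalState, each forwarding to mImpl. They are instantiated for every handle
// type via GOLDFISH_VK_LIST_*_HANDLE_TYPES immediately afterwards.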
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF(type)                                         \
    type VkDecoderGlobalState::new_boxed_##type(type underlying, VulkanDispatch* dispatch,     \
                                                bool ownDispatch) {                            \
        return mImpl->new_boxed_##type(underlying, dispatch, ownDispatch);                     \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::unboxed_to_boxed_##type(type unboxed) {                         \
        return mImpl->unboxed_to_boxed_##type(unboxed);                                        \
    }                                                                                          \
    VulkanDispatch* VkDecoderGlobalState::dispatch_##type(type boxed) {                        \
        return mImpl->dispatch_##type(boxed);                                                  \
    }

#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF(type)                                     \
    type VkDecoderGlobalState::new_boxed_non_dispatchable_##type(type underlying) {            \
        return mImpl->new_boxed_non_dispatchable_##type(underlying);                           \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::unboxed_to_boxed_non_dispatchable_##type(type unboxed) {        \
        return mImpl->unboxed_to_boxed_non_dispatchable_##type(unboxed);                       \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF)

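// Free-function API for boxed handles. Unlike the member versions above, these consult
// sBoxedHandleManager directly: unbox_* looks up the underlying handle, delete_* releases
// the mapping (and, for dispatchables, any associated read stream and ordering-maintenance
// info), and unboxed_to_boxed_* performs the reverse lookup under the manager's lock.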
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                     \
    type unbox_##type(type boxed) {                                                               \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return VK_NULL_HANDLE;                                                          \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    VulkanDispatch* dispatch_##type(type boxed) {                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed);                     \
            return nullptr;                                                                       \
        }                                                                                         \
        return elt->dispatch;                                                                     \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return;                                                                         \
        releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
        if (elt->readStream) {                                                                    \
            sReadStreamRegistry.push(elt->readStream);                                            \
            elt->readStream = nullptr;                                                            \
        }                                                                                         \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    type unboxed_to_boxed_##type(type unboxed) {                                                  \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }

#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                 \
    type new_boxed_non_dispatchable_##type(type underlying) {                                     \
        return VkDecoderGlobalState::get()->new_boxed_non_dispatchable_##type(underlying);        \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
        sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
    }                                                                                             \
    type unbox_##type(type boxed) {                                                               \
        if (!boxed) return boxed;                                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                       \
                << "Unbox " << boxed << " failed, not found.";                                    \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)

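// BoxedHandleUnwrapAndDeletePreserveBoxedMapping: records the original boxed handle values
// into a buffer allocated from the provided BumpPool while the handles themselves are
// rewritten in place to their unboxed equivalents (see the mapHandles_* generators below).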
void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::setup(android::base::BumpPool* pool,
                                                           uint64_t** bufPtr) {
    mPool = pool;
    mPreserveBufPtr = bufPtr;
}

void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::allocPreserve(size_t count) {
    *mPreserveBufPtr = (uint64_t*)mPool->alloc(count * sizeof(uint64_t));
}

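// The mapHandles_* overloads below save each incoming (boxed) value into *mPreserveBufPtr
// and then unbox in place. The non-dispatchable variant additionally deletes the boxed
// wrapper after unboxing.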
#define BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)        \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }

#define BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)    \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                auto boxed = handles[i];                                                  \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                auto boxed = handles[i];                                                  \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                auto boxed = (type_name)(uintptr_t)handle_u64s[i];                        \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(
    BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(
    BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)

}  // namespace vk
}  // namespace gfxstream