1 // Copyright 2018 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 #include "VkDecoderGlobalState.h"
15 
16 #include <algorithm>
17 #include <functional>
18 #include <list>
19 #include <memory>
20 #include <mutex>
21 #include <unordered_map>
22 #include <vector>
23 
24 #include "FrameBuffer.h"
25 #include "VkAndroidNativeBuffer.h"
26 #include "VkCommonOperations.h"
27 #include "VkDecoderContext.h"
28 #include "VkDecoderSnapshot.h"
29 #include "VulkanDispatch.h"
30 #include "VulkanStream.h"
31 #include "aemu/base/ManagedDescriptor.hpp"
32 #include "aemu/base/Optional.h"
33 #include "aemu/base/Tracing.h"
34 #include "aemu/base/containers/EntityManager.h"
35 #include "aemu/base/containers/HybridEntityManager.h"
36 #include "aemu/base/containers/Lookup.h"
37 #include "aemu/base/files/Stream.h"
38 #include "aemu/base/memory/SharedMemory.h"
39 #include "aemu/base/synchronization/ConditionVariable.h"
40 #include "aemu/base/synchronization/Lock.h"
41 #include "aemu/base/system/System.h"
42 #include "common/goldfish_vk_deepcopy.h"
43 #include "common/goldfish_vk_dispatch.h"
44 #include "common/goldfish_vk_marshaling.h"
45 #include "common/goldfish_vk_reserved_marshaling.h"
46 #include "compressedTextureFormats/AstcCpuDecompressor.h"
47 #include "host-common/GfxstreamFatalError.h"
48 #include "host-common/HostmemIdMapping.h"
49 #include "host-common/address_space_device_control_ops.h"
50 #include "host-common/emugl_vm_operations.h"
51 #include "host-common/feature_control.h"
52 #include "host-common/vm_operations.h"
53 #include "utils/RenderDoc.h"
54 #include "vk_util.h"
55 #include "vulkan/emulated_textures/AstcTexture.h"
56 #include "vulkan/emulated_textures/CompressedImageInfo.h"
57 #include "vulkan/vk_enum_string_helper.h"
58 
59 #ifndef _WIN32
60 #include <unistd.h>
61 #endif
62 
63 #ifdef __APPLE__
64 #include <CoreFoundation/CoreFoundation.h>
65 #endif
66 
67 #include <climits>
68 
69 namespace gfxstream {
70 namespace vk {
71 
72 using android::base::AutoLock;
73 using android::base::ConditionVariable;
74 using android::base::DescriptorType;
75 using android::base::Lock;
76 using android::base::ManagedDescriptor;
77 using android::base::MetricEventBadPacketLength;
78 using android::base::MetricEventDuplicateSequenceNum;
79 using android::base::MetricEventVulkanOutOfMemory;
80 using android::base::Optional;
81 using android::base::SharedMemory;
82 using android::base::StaticLock;
83 using android::emulation::HostmemIdMapping;
84 using android::emulation::ManagedDescriptorInfo;
85 using android::emulation::VulkanInfo;
86 using emugl::ABORT_REASON_OTHER;
87 using emugl::FatalError;
88 using emugl::GfxApiLogger;
89 
90 // TODO(b/261477138): Move to a shared aemu definition
91 #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))
92 #define __ALIGN(x, a) __ALIGN_MASK(x, (__typeof__(x))(a)-1)
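// For example, with a power-of-two alignment __ALIGN rounds up to the next multiple:
// __ALIGN(4097, 4096) == 8192 and __ALIGN(4096, 4096) == 4096.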
93 
94 // TODO: Asserts build
95 #define DCHECK(condition) (void)(condition);
96 
97 #define VKDGS_DEBUG 0
98 
99 #if VKDGS_DEBUG
100 #define VKDGS_LOG(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
101 #else
102 #define VKDGS_LOG(fmt, ...)
103 #endif
104 
105 // Blob mem
106 #define STREAM_BLOB_MEM_GUEST 1
107 #define STREAM_BLOB_MEM_HOST3D 2
108 #define STREAM_BLOB_MEM_HOST3D_GUEST 3
109 
110 // Blob flags
111 #define STREAM_BLOB_FLAG_USE_MAPPABLE 1
112 #define STREAM_BLOB_FLAG_USE_SHAREABLE 2
113 #define STREAM_BLOB_FLAG_USE_CROSS_DEVICE 4
114 #define STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE 8
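// These flag values form a bitmask and may be combined, e.g. a mappable, shareable blob
// would use (STREAM_BLOB_FLAG_USE_MAPPABLE | STREAM_BLOB_FLAG_USE_SHAREABLE) == 3.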
115 
116 #define VALIDATE_REQUIRED_HANDLE(parameter) \
117     validateRequiredHandle(__FUNCTION__, #parameter, parameter)
118 
119 template <typename T>
120 void validateRequiredHandle(const char* api_name, const char* parameter_name, T value) {
121     if (value == VK_NULL_HANDLE) {
122         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << api_name << ":" << parameter_name;
123     }
124 }
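
// Illustrative use via the macro above (pCreateInfo->module is a hypothetical parameter):
//   VALIDATE_REQUIRED_HANDLE(pCreateInfo->module);
// This aborts with the message "<calling function>:pCreateInfo->module" if the handle is
// VK_NULL_HANDLE, and is a no-op otherwise.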
125 
126 // A list of device extensions that should not be passed to the host driver.
127 // These will mainly include Vulkan features that we emulate ourselves.
128 static constexpr const char* const kEmulatedDeviceExtensions[] = {
129     "VK_ANDROID_external_memory_android_hardware_buffer",
130     "VK_ANDROID_native_buffer",
131     "VK_FUCHSIA_buffer_collection",
132     "VK_FUCHSIA_external_memory",
133     "VK_FUCHSIA_external_semaphore",
134     VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME,
135     VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
136     VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
137     VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
138     VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
139     VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME,
140     VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
141 };
142 
143 // A list of instance extensions that should not be passed to the host driver.
144 // On older pre-1.1 Vulkan platforms, gfxstream emulates these features.
145 static constexpr const char* const kEmulatedInstanceExtensions[] = {
146     VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
147     VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
148     VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
149 };
150 
151 static constexpr uint32_t kMaxSafeVersion = VK_MAKE_VERSION(1, 1, 0);
152 static constexpr uint32_t kMinVersion = VK_MAKE_VERSION(1, 0, 0);
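// VK_MAKE_VERSION packs (major << 22) | (minor << 12) | patch, so kMaxSafeVersion is
// 0x00401000 (Vulkan 1.1.0) and kMinVersion is 0x00400000 (Vulkan 1.0.0).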
153 
154 static constexpr uint64_t kPageSizeforBlob = 4096;
155 static constexpr uint64_t kPageMaskForBlob = ~(0xfff);
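// kPageMaskForBlob clears the low 12 bits, rounding an address down to its containing
// 4096-byte page, e.g. (0x12345 & kPageMaskForBlob) == 0x12000.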
156 
157 static uint64_t hostBlobId = 0;
158 
159 #define DEFINE_BOXED_HANDLE_TYPE_TAG(type) Tag_##type,
160 
161 enum BoxedHandleTypeTag {
162     Tag_Invalid = 0,
163     GOLDFISH_VK_LIST_HANDLE_TYPES_BY_STAGE(DEFINE_BOXED_HANDLE_TYPE_TAG)
164 };
165 
166 template <class T>
167 class BoxedHandleManager {
168    public:
169     // The hybrid entity manager uses a sequence lock to protect access to
170     // a working set of 16000 handles, allowing us to avoid using a regular
171     // lock for those. Performance is degraded when going over this number,
172     // as it will then fall back to a std::map.
173     //
174     // We use 16000 as the max number of live handles to track; we don't
175     // expect the system to go over 16000 total live handles, outside some
176     // dEQP object management tests.
177     using Store = android::base::HybridEntityManager<16000, uint64_t, T>;
178 
179     Lock lock;
180     mutable Store store;
181     std::unordered_map<uint64_t, uint64_t> reverseMap;
182     struct DelayedRemove {
183         uint64_t handle;
184         std::function<void()> callback;
185     };
186     std::unordered_map<VkDevice, std::vector<DelayedRemove>> delayedRemoves;
187 
188     void clear() {
189         reverseMap.clear();
190         store.clear();
191     }
192 
193     uint64_t add(const T& item, BoxedHandleTypeTag tag) {
194         auto res = (uint64_t)store.add(item, (size_t)tag);
195         AutoLock l(lock);
196         reverseMap[(uint64_t)(item.underlying)] = res;
197         return res;
198     }
199 
200     uint64_t addFixed(uint64_t handle, const T& item, BoxedHandleTypeTag tag) {
201         auto res = (uint64_t)store.addFixed(handle, item, (size_t)tag);
202         AutoLock l(lock);
203         reverseMap[(uint64_t)(item.underlying)] = res;
204         return res;
205     }
206 
207     void remove(uint64_t h) {
208         auto item = get(h);
209         if (item) {
210             AutoLock l(lock);
211             reverseMap.erase((uint64_t)(item->underlying));
212         }
213         store.remove(h);
214     }
215 
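    // Queues a handle for later destruction instead of removing it immediately. The queued
    // callbacks run in processDelayedRemovesGlobalStateLocked(), which is invoked per device
    // (with VkDecoderGlobalState already locked), e.g. during instance teardown.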
216     void removeDelayed(uint64_t h, VkDevice device, std::function<void()> callback) {
217         AutoLock l(lock);
218         delayedRemoves[device].push_back({h, callback});
219     }
220 
221     void processDelayedRemovesGlobalStateLocked(VkDevice device) {
222         AutoLock l(lock);
223         auto it = delayedRemoves.find(device);
224         if (it == delayedRemoves.end()) return;
225         auto& delayedRemovesList = it->second;
226         for (const auto& r : delayedRemovesList) {
227             auto h = r.handle;
228             // VkDecoderGlobalState is already locked when callback is called.
229             auto funcGlobalStateLocked = r.callback;
230             funcGlobalStateLocked();
231             store.remove(h);
232         }
233         delayedRemovesList.clear();
234         delayedRemoves.erase(it);
235     }
236 
237     T* get(uint64_t h) { return (T*)store.get_const(h); }
238 
239     uint64_t getBoxedFromUnboxedLocked(uint64_t unboxed) {
240         auto* res = android::base::find(reverseMap, unboxed);
241         if (!res) return 0;
242         return *res;
243     }
244 };
245 
246 struct OrderMaintenanceInfo {
247     uint32_t sequenceNumber = 0;
248     Lock lock;
249     ConditionVariable cv;
250 
251     uint32_t refcount = 1;
252 
253     void incRef() { __atomic_add_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
254 
255     bool decRef() { return 0 == __atomic_sub_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
256 };
257 
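// acquireOrderMaintInfo()/releaseOrderMaintInfo() implement simple manual reference
// counting: an OrderMaintenanceInfo starts with refcount 1 and is deleted when the final
// release drops the count to zero.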
258 static void acquireOrderMaintInfo(OrderMaintenanceInfo* ord) {
259     if (!ord) return;
260     ord->incRef();
261 }
262 
263 static void releaseOrderMaintInfo(OrderMaintenanceInfo* ord) {
264     if (!ord) return;
265     if (ord->decRef()) delete ord;
266 }
267 
268 template <class T>
269 class DispatchableHandleInfo {
270    public:
271     T underlying;
272     VulkanDispatch* dispatch = nullptr;
273     bool ownDispatch = false;
274     OrderMaintenanceInfo* ordMaintInfo = nullptr;
275     VulkanMemReadingStream* readStream = nullptr;
276 };
277 
278 static BoxedHandleManager<DispatchableHandleInfo<uint64_t>> sBoxedHandleManager;
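
// Illustrative lifecycle of a boxed handle (sketch only; rawHandle is hypothetical and
// Tag_VkInstance assumes the generated enumerator for VkInstance):
//   DispatchableHandleInfo<uint64_t> info;
//   info.underlying = rawHandle;
//   uint64_t boxed = sBoxedHandleManager.add(info, Tag_VkInstance);
//   DispatchableHandleInfo<uint64_t>* unboxed = sBoxedHandleManager.get(boxed);
//   sBoxedHandleManager.remove(boxed);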
279 
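// Pools VulkanMemReadingStream instances so decoder threads can reuse them rather than
// allocating a fresh stream for every decode: pop() hands out a free stream (creating one
// if the free list is empty) and push() returns it for reuse.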
280 struct ReadStreamRegistry {
281     Lock mLock;
282 
283     std::vector<VulkanMemReadingStream*> freeStreams;
284 
285     ReadStreamRegistry() { freeStreams.reserve(100); };
286 
287     VulkanMemReadingStream* pop() {
288         AutoLock lock(mLock);
289         if (freeStreams.empty()) {
290             return new VulkanMemReadingStream(0);
291         } else {
292             VulkanMemReadingStream* res = freeStreams.back();
293             freeStreams.pop_back();
294             return res;
295         }
296     }
297 
298     void push(VulkanMemReadingStream* stream) {
299         AutoLock lock(mLock);
300         freeStreams.push_back(stream);
301     }
302 };
303 
304 static ReadStreamRegistry sReadStreamRegistry;
305 
306 class VkDecoderGlobalState::Impl {
307    public:
308     Impl()
309         : m_vk(vkDispatch()),
310           m_emu(getGlobalVkEmulation()),
311           mRenderDocWithMultipleVkInstances(m_emu->guestRenderDoc.get()) {
312         mSnapshotsEnabled = feature_is_enabled(kFeature_VulkanSnapshots);
313         mVkCleanupEnabled =
314             android::base::getEnvironmentVariable("ANDROID_EMU_VK_NO_CLEANUP") != "1";
315         mLogging = android::base::getEnvironmentVariable("ANDROID_EMU_VK_LOG_CALLS") == "1";
316         mVerbosePrints = android::base::getEnvironmentVariable("ANDROID_EMUGL_VERBOSE") == "1";
317         if (get_emugl_address_space_device_control_ops().control_get_hw_funcs &&
318             get_emugl_address_space_device_control_ops().control_get_hw_funcs()) {
319             mUseOldMemoryCleanupPath = 0 == get_emugl_address_space_device_control_ops()
320                                                 .control_get_hw_funcs()
321                                                 ->getPhysAddrStartLocked();
322         }
323         mGuestUsesAngle = feature_is_enabled(kFeature_GuestUsesAngle);
324     }
325 
326     ~Impl() = default;
327 
328     // Resets all internal tracking info.
329     // Assumes that the heavyweight cleanup operations
330     // have already happened.
331     void clear() {
332         mInstanceInfo.clear();
333         mPhysdevInfo.clear();
334         mDeviceInfo.clear();
335         mImageInfo.clear();
336         mImageViewInfo.clear();
337         mSamplerInfo.clear();
338         mCmdBufferInfo.clear();
339         mCmdPoolInfo.clear();
340         mDeviceToPhysicalDevice.clear();
341         mPhysicalDeviceToInstance.clear();
342         mQueueInfo.clear();
343         mBufferInfo.clear();
344         mMemoryInfo.clear();
345         mShaderModuleInfo.clear();
346         mPipelineCacheInfo.clear();
347         mPipelineInfo.clear();
348         mRenderPassInfo.clear();
349         mFramebufferInfo.clear();
350         mSemaphoreInfo.clear();
351         mFenceInfo.clear();
352 #ifdef _WIN32
353         mSemaphoreId = 1;
354         mExternalSemaphoresById.clear();
355 #endif
356         mDescriptorUpdateTemplateInfo.clear();
357 
358         mCreatedHandlesForSnapshotLoad.clear();
359         mCreatedHandlesForSnapshotLoadIndex = 0;
360 
361         sBoxedHandleManager.clear();
362     }
363 
364     bool snapshotsEnabled() const { return mSnapshotsEnabled; }
365 
366     bool vkCleanupEnabled() const { return mVkCleanupEnabled; }
367 
368     void save(android::base::Stream* stream) { snapshot()->save(stream); }
369 
370     void load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
371               HealthMonitor<>* healthMonitor) {
372         // Assumes that all instances have already been destroyed
373         // by FrameBuffer's onLoad method.
374 
375         // destroy all current internal data structures
376         clear();
377 
378         // this part will replay in the decoder
379         snapshot()->load(stream, gfxLogger, healthMonitor);
380     }
381 
382     void lock() { mLock.lock(); }
383 
384     void unlock() { mLock.unlock(); }
385 
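    // The snapshot-load handle buffer is laid out as a uint32_t byte count followed by
    // (byte count / 8) uint64_t handle values; returns the number of bytes consumed.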
386     size_t setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
387         size_t consumed = 0;
388 
389         if (!buffer) return consumed;
390 
391         uint32_t bufferSize = *(uint32_t*)buffer;
392 
393         consumed += 4;
394 
395         uint32_t handleCount = bufferSize / 8;
396         VKDGS_LOG("incoming handle count: %u", handleCount);
397 
398         uint64_t* handles = (uint64_t*)(buffer + 4);
399 
400         mCreatedHandlesForSnapshotLoad.clear();
401         mCreatedHandlesForSnapshotLoadIndex = 0;
402 
403         for (uint32_t i = 0; i < handleCount; ++i) {
404             VKDGS_LOG("handle to load: 0x%llx", (unsigned long long)(uintptr_t)handles[i]);
405             mCreatedHandlesForSnapshotLoad.push_back(handles[i]);
406             consumed += 8;
407         }
408 
409         return consumed;
410     }
411 
412     void clearCreatedHandlesForSnapshotLoad() {
413         mCreatedHandlesForSnapshotLoad.clear();
414         mCreatedHandlesForSnapshotLoadIndex = 0;
415     }
416 
417     VkResult on_vkEnumerateInstanceVersion(android::base::BumpPool* pool, uint32_t* pApiVersion) {
418         if (m_vk->vkEnumerateInstanceVersion) {
419             VkResult res = m_vk->vkEnumerateInstanceVersion(pApiVersion);
420 
421             if (*pApiVersion > kMaxSafeVersion) {
422                 *pApiVersion = kMaxSafeVersion;
423             }
424 
425             return res;
426         }
427         *pApiVersion = kMinVersion;
428         return VK_SUCCESS;
429     }
430 
431     VkResult on_vkCreateInstance(android::base::BumpPool* pool,
432                                  const VkInstanceCreateInfo* pCreateInfo,
433                                  const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {
434         std::vector<const char*> finalExts = filteredInstanceExtensionNames(
435             pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
436 
437         if (pCreateInfo->pApplicationInfo) {
438             if (pCreateInfo->pApplicationInfo->pApplicationName)
439                 INFO("Creating Vulkan instance for app: %s",
440                      pCreateInfo->pApplicationInfo->pApplicationName);
441             if (pCreateInfo->pApplicationInfo->pEngineName)
442                 INFO("Creating Vulkan instance for engine: %s",
443                      pCreateInfo->pApplicationInfo->pEngineName);
444         }
445 
446         // Create a higher-version instance whenever possible.
447         uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
448         if (pCreateInfo->pApplicationInfo) {
449             apiVersion = pCreateInfo->pApplicationInfo->apiVersion;
450         }
451         if (m_vk->vkEnumerateInstanceVersion) {
452             uint32_t instanceVersion;
453             VkResult result = m_vk->vkEnumerateInstanceVersion(&instanceVersion);
454             if (result == VK_SUCCESS && instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
455                 apiVersion = instanceVersion;
456             }
457         }
458 
459         VkInstanceCreateInfo createInfoFiltered;
460         VkApplicationInfo appInfo = {};
461         deepcopy_VkInstanceCreateInfo(pool, VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, pCreateInfo,
462                                       &createInfoFiltered);
463 
464         createInfoFiltered.enabledExtensionCount = static_cast<uint32_t>(finalExts.size());
465         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
466         if (createInfoFiltered.pApplicationInfo != nullptr) {
467             const_cast<VkApplicationInfo*>(createInfoFiltered.pApplicationInfo)->apiVersion =
468                 apiVersion;
469             appInfo = *createInfoFiltered.pApplicationInfo;
470         }
471 
472         // remove VkDebugReportCallbackCreateInfoEXT and
473         // VkDebugUtilsMessengerCreateInfoEXT from the chain.
474         auto* curr = reinterpret_cast<vk_struct_common*>(&createInfoFiltered);
475         while (curr != nullptr) {
476             if (curr->pNext != nullptr &&
477                 (curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT ||
478                  curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT)) {
479                 curr->pNext = curr->pNext->pNext;
480             }
481             curr = curr->pNext;
482         }
483 
484         // bug: 155795731
485         bool swiftshader =
486             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
487              0);
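        // With the SwiftShader ICD, the global lock is taken before vkCreateInstance and
        // held across it; for other ICDs it is only acquired after the instance is created.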
488         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
489 
490         if (swiftshader) {
491             if (mLogging) {
492                 fprintf(stderr, "%s: acquire lock\n", __func__);
493             }
494             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
495         }
496 
497         VkResult res = m_vk->vkCreateInstance(&createInfoFiltered, pAllocator, pInstance);
498 
499         if (res != VK_SUCCESS) {
500             return res;
501         }
502 
503         if (!swiftshader) {
504             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
505         }
506 
507         // TODO: bug 129484301
508         get_emugl_vm_operations().setSkipSnapshotSave(
509             !feature_is_enabled(kFeature_VulkanSnapshots));
510 
511         InstanceInfo info;
512         info.apiVersion = apiVersion;
513         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
514             info.enabledExtensionNames.push_back(createInfoFiltered.ppEnabledExtensionNames[i]);
515         }
516 
517         // Box it up
518         VkInstance boxed = new_boxed_VkInstance(*pInstance, nullptr, true /* own dispatch */);
519         init_vulkan_dispatch_from_instance(m_vk, *pInstance, dispatch_VkInstance(boxed));
520         info.boxed = boxed;
521 
522 #ifdef VK_MVK_moltenvk
523         if (m_emu->instanceSupportsMoltenVK) {
524             if (!m_vk->vkSetMTLTextureMVK) {
525                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find vkSetMTLTextureMVK";
526             }
527         }
528 #endif
529 
530         std::string_view appName = appInfo.pApplicationName ? appInfo.pApplicationName : "";
531         std::string_view engineName = appInfo.pEngineName ? appInfo.pEngineName : "";
532 
533         // TODO(gregschlom) Use a better criteria to determine when to use ASTC CPU decompression.
534         //   The goal is to only enable ASTC CPU decompression for specific applications.
535         //   Theoretically the pApplicationName field would be exactly what we want, unfortunately
536         //   it looks like Unity apps always set this to "Unity" instead of the actual application.
537         //   Eventually we will want to use https://r.android.com/2163499 for this purpose.
538         const bool isUnity = appName == "Unity" && engineName == "Unity";
539         if (m_emu->astcLdrEmulationMode == AstcEmulationMode::CpuOnly ||
540             (m_emu->astcLdrEmulationMode == AstcEmulationMode::Auto && isUnity)) {
541             info.useAstcCpuDecompression = true;
542         }
543 
544         info.isAngle = (engineName == "ANGLE");
545 
546         mInstanceInfo[*pInstance] = info;
547 
548         *pInstance = (VkInstance)info.boxed;
549 
550         auto fb = FrameBuffer::getFB();
551         if (!fb) return res;
552 
553         if (vkCleanupEnabled()) {
554             fb->registerProcessCleanupCallback(unbox_VkInstance(boxed), [this, boxed] {
555                 vkDestroyInstanceImpl(unbox_VkInstance(boxed), nullptr);
556             });
557         }
558 
559         return res;
560     }
561 
562     void vkDestroyInstanceImpl(VkInstance instance, const VkAllocationCallbacks* pAllocator) {
563         // Do delayed removes out of the lock, but get the list of devices to destroy inside the
564         // lock.
565         {
566             std::lock_guard<std::recursive_mutex> lock(mLock);
567             std::vector<VkDevice> devicesToDestroy;
568 
569             for (auto it : mDeviceToPhysicalDevice) {
570                 auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
571                 if (!otherInstance) continue;
572                 if (instance == *otherInstance) {
573                     devicesToDestroy.push_back(it.first);
574                 }
575             }
576 
577             for (auto device : devicesToDestroy) {
578                 sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
579             }
580         }
581 
582         std::lock_guard<std::recursive_mutex> lock(mLock);
583 
584         teardownInstanceLocked(instance);
585 
586         if (mRenderDocWithMultipleVkInstances) {
587             mRenderDocWithMultipleVkInstances->removeVkInstance(instance);
588         }
589         m_vk->vkDestroyInstance(instance, pAllocator);
590 
591         auto it = mPhysicalDeviceToInstance.begin();
592 
593         while (it != mPhysicalDeviceToInstance.end()) {
594             if (it->second == instance) {
595                 it = mPhysicalDeviceToInstance.erase(it);
596             } else {
597                 ++it;
598             }
599         }
600 
601         auto* instInfo = android::base::find(mInstanceInfo, instance);
602         delete_VkInstance(instInfo->boxed);
603         mInstanceInfo.erase(instance);
604     }
605 
606     void on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance boxed_instance,
607                               const VkAllocationCallbacks* pAllocator) {
608         auto instance = unbox_VkInstance(boxed_instance);
609 
610         vkDestroyInstanceImpl(instance, pAllocator);
611 
612         auto fb = FrameBuffer::getFB();
613         if (!fb) return;
614 
615         fb->unregisterProcessCleanupCallback(instance);
616     }
617 
618     VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool, VkInstance boxed_instance,
619                                            uint32_t* physicalDeviceCount,
620                                            VkPhysicalDevice* physicalDevices) {
621         auto instance = unbox_VkInstance(boxed_instance);
622         auto vk = dispatch_VkInstance(boxed_instance);
623 
624         uint32_t physicalDevicesSize = 0;
625         if (physicalDeviceCount) {
626             physicalDevicesSize = *physicalDeviceCount;
627         }
628 
629         uint32_t actualPhysicalDeviceCount;
630         auto res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount, nullptr);
631         if (res != VK_SUCCESS) {
632             return res;
633         }
634         std::vector<VkPhysicalDevice> validPhysicalDevices(actualPhysicalDeviceCount);
635         res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount,
636                                              validPhysicalDevices.data());
637         if (res != VK_SUCCESS) return res;
638 
639         std::lock_guard<std::recursive_mutex> lock(mLock);
640 
641         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
642             PFN_vkGetPhysicalDeviceProperties2KHR getPhysdevProps2Func =
643                 vk_util::getVkInstanceProcAddrWithFallback<
644                     vk_util::vk_fn_info::GetPhysicalDeviceProperties2>(
645                     {
646                         vk->vkGetInstanceProcAddr,
647                         m_vk->vkGetInstanceProcAddr,
648                     },
649                     instance);
650 
651             if (getPhysdevProps2Func) {
652                 validPhysicalDevices.erase(
653                     std::remove_if(validPhysicalDevices.begin(), validPhysicalDevices.end(),
654                                    [getPhysdevProps2Func, this](VkPhysicalDevice physicalDevice) {
655                                        // Query the device UUID for comparison below.
656                                        VkPhysicalDeviceIDPropertiesKHR idProps = {
657                                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
658                                            nullptr,
659                                        };
660                                        VkPhysicalDeviceProperties2KHR propsWithId = {
661                                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
662                                            &idProps,
663                                        };
664                                        getPhysdevProps2Func(physicalDevice, &propsWithId);
665 
666                                        // Remove those devices whose UUIDs don't match the one
667                                        // in VkCommonOperations.
668                                        return memcmp(m_emu->deviceInfo.idProps.deviceUUID,
669                                                      idProps.deviceUUID, VK_UUID_SIZE) != 0;
670                                    }),
671                     validPhysicalDevices.end());
672             } else {
673                 fprintf(stderr,
674                         "%s: warning: failed to "
675                         "vkGetPhysicalDeviceProperties2KHR\n",
676                         __func__);
677             }
678         } else {
679             // If we don't support ID properties then just advertise only the
680             // first physical device.
681             fprintf(stderr,
682                     "%s: device id properties not supported, using first "
683                     "physical device\n",
684                     __func__);
685         }
686         if (!validPhysicalDevices.empty()) {
687             validPhysicalDevices.erase(std::next(validPhysicalDevices.begin()),
688                                        validPhysicalDevices.end());
689         }
690 
691         if (physicalDeviceCount) {
692             *physicalDeviceCount = validPhysicalDevices.size();
693         }
694 
695         if (physicalDeviceCount && physicalDevices) {
696             // Box them up
697             for (uint32_t i = 0; i < std::min(*physicalDeviceCount, physicalDevicesSize); ++i) {
698                 mPhysicalDeviceToInstance[validPhysicalDevices[i]] = instance;
699 
700                 auto& physdevInfo = mPhysdevInfo[validPhysicalDevices[i]];
701 
702                 physdevInfo.boxed = new_boxed_VkPhysicalDevice(validPhysicalDevices[i], vk,
703                                                                false /* does not own dispatch */);
704 
705                 vk->vkGetPhysicalDeviceProperties(validPhysicalDevices[i], &physdevInfo.props);
706 
707                 if (physdevInfo.props.apiVersion > kMaxSafeVersion) {
708                     physdevInfo.props.apiVersion = kMaxSafeVersion;
709                 }
710 
711                 vk->vkGetPhysicalDeviceMemoryProperties(validPhysicalDevices[i],
712                                                         &physdevInfo.memoryProperties);
713 
714                 uint32_t queueFamilyPropCount = 0;
715 
716                 vk->vkGetPhysicalDeviceQueueFamilyProperties(validPhysicalDevices[i],
717                                                              &queueFamilyPropCount, nullptr);
718 
719                 physdevInfo.queueFamilyProperties.resize((size_t)queueFamilyPropCount);
720 
721                 vk->vkGetPhysicalDeviceQueueFamilyProperties(
722                     validPhysicalDevices[i], &queueFamilyPropCount,
723                     physdevInfo.queueFamilyProperties.data());
724 
725                 physicalDevices[i] = (VkPhysicalDevice)physdevInfo.boxed;
726             }
727             if (physicalDevicesSize < *physicalDeviceCount) {
728                 res = VK_INCOMPLETE;
729             }
730         }
731 
732         return res;
733     }
734 
735     void on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
736                                         VkPhysicalDevice boxed_physicalDevice,
737                                         VkPhysicalDeviceFeatures* pFeatures) {
738         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
739         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
740 
741         vk->vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
742         pFeatures->textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
743         pFeatures->textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
744     }
745 
746     void on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
747                                          VkPhysicalDevice boxed_physicalDevice,
748                                          VkPhysicalDeviceFeatures2* pFeatures) {
749         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
750         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
751 
752         std::lock_guard<std::recursive_mutex> lock(mLock);
753 
754         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
755         if (!physdevInfo) return;
756 
757         auto instance = mPhysicalDeviceToInstance[physicalDevice];
758         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
759         if (!instanceInfo) return;
760 
761         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
762             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
763             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
764         } else if (hasInstanceExtension(instance,
765                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
766             vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
767         } else {
768             // No instance extension, fake it!!!!
769             if (pFeatures->pNext) {
770                 fprintf(stderr,
771                         "%s: Warning: Trying to use extension struct in "
772                         "VkPhysicalDeviceFeatures2 without having enabled "
773                         "the extension!!!!11111\n",
774                         __func__);
775             }
776             *pFeatures = {
777                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
778                 0,
779             };
780             vk->vkGetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
781         }
782 
783         pFeatures->features.textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
784         pFeatures->features.textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
785         VkPhysicalDeviceSamplerYcbcrConversionFeatures* ycbcrFeatures =
786             vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pFeatures);
787         if (ycbcrFeatures != nullptr) {
788             ycbcrFeatures->samplerYcbcrConversion |= m_emu->enableYcbcrEmulation;
789         }
790     }
791 
792     VkResult on_vkGetPhysicalDeviceImageFormatProperties(
793         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice, VkFormat format,
794         VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
795         VkImageFormatProperties* pImageFormatProperties) {
796         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
797         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
798         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
799         if (emulatedTexture) {
800             if (!supportEmulatedCompressedImageFormatProperty(format, type, tiling, usage, flags)) {
801                 memset(pImageFormatProperties, 0, sizeof(VkImageFormatProperties));
802                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
803             }
804             flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR;
805             flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
806             usage |= VK_IMAGE_USAGE_STORAGE_BIT;
807             format = CompressedImageInfo::getCompressedMipmapsFormat(format);
808         }
809 
810         VkResult res = vk->vkGetPhysicalDeviceImageFormatProperties(
811             physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
812         if (res != VK_SUCCESS) {
813             return res;
814         }
815         if (emulatedTexture) {
816             maskImageFormatPropertiesForEmulatedTextures(pImageFormatProperties);
817         }
818         return res;
819     }
820 
821     VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
822         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
823         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
824         VkImageFormatProperties2* pImageFormatProperties) {
825         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
826         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
827         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
828         VkFormat format = pImageFormatInfo->format;
829         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
830         if (emulatedTexture) {
831             if (!supportEmulatedCompressedImageFormatProperty(
832                     pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
833                     pImageFormatInfo->usage, pImageFormatInfo->flags)) {
834                 memset(&pImageFormatProperties->imageFormatProperties, 0,
835                        sizeof(VkImageFormatProperties));
836                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
837             }
838             imageFormatInfo = *pImageFormatInfo;
839             pImageFormatInfo = &imageFormatInfo;
840             imageFormatInfo.flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR;
841             imageFormatInfo.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
842             imageFormatInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
843             imageFormatInfo.format = CompressedImageInfo::getCompressedMipmapsFormat(format);
844         }
845         std::lock_guard<std::recursive_mutex> lock(mLock);
846 
847         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
848         if (!physdevInfo) {
849             return VK_ERROR_OUT_OF_HOST_MEMORY;
850         }
851 
852         VkResult res = VK_ERROR_INITIALIZATION_FAILED;
853 
854         auto instance = mPhysicalDeviceToInstance[physicalDevice];
855         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
856         if (!instanceInfo) {
857             return res;
858         }
859 
860         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
861             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
862             res = vk->vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo,
863                                                                 pImageFormatProperties);
864         } else if (hasInstanceExtension(instance,
865                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
866             res = vk->vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo,
867                                                                    pImageFormatProperties);
868         } else {
869             // No instance extension, fake it!!!!
870             if (pImageFormatProperties->pNext) {
871                 fprintf(stderr,
872                         "%s: Warning: Trying to use extension struct in "
873                         "VkImageFormatProperties2 without having enabled "
874                         "the extension!!!!11111\n",
875                         __func__);
876             }
877             *pImageFormatProperties = {
878                 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
879                 0,
880             };
881             res = vk->vkGetPhysicalDeviceImageFormatProperties(
882                 physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type,
883                 pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags,
884                 &pImageFormatProperties->imageFormatProperties);
885         }
886         if (res != VK_SUCCESS) {
887             return res;
888         }
889 
890         const VkPhysicalDeviceExternalImageFormatInfo* extImageFormatInfo =
891             vk_find_struct<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo);
892         VkExternalImageFormatProperties* extImageFormatProps =
893             vk_find_struct<VkExternalImageFormatProperties>(pImageFormatProperties);
894 
895         // Only allow dedicated allocations for external images.
896         if (extImageFormatInfo && extImageFormatProps) {
897             extImageFormatProps->externalMemoryProperties.externalMemoryFeatures |=
898                 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT;
899         }
900 
901         if (emulatedTexture) {
902             maskImageFormatPropertiesForEmulatedTextures(
903                 &pImageFormatProperties->imageFormatProperties);
904         }
905 
906         return res;
907     }
908 
909     void on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool* pool,
910                                                 VkPhysicalDevice boxed_physicalDevice,
911                                                 VkFormat format,
912                                                 VkFormatProperties* pFormatProperties) {
913         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
914         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
915         getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
916             [vk](VkPhysicalDevice physicalDevice, VkFormat format,
917                  VkFormatProperties* pFormatProperties) {
918                 vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
919             },
920             vk, physicalDevice, format, pFormatProperties);
921     }
922 
923     void on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool* pool,
924                                                  VkPhysicalDevice boxed_physicalDevice,
925                                                  VkFormat format,
926                                                  VkFormatProperties2* pFormatProperties) {
927         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
928         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
929 
930         std::lock_guard<std::recursive_mutex> lock(mLock);
931 
932         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
933         if (!physdevInfo) return;
934 
935         auto instance = mPhysicalDeviceToInstance[physicalDevice];
936         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
937         if (!instanceInfo) return;
938 
939         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
940             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
941             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
942                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
943                      VkFormatProperties2* pFormatProperties) {
944                     vk->vkGetPhysicalDeviceFormatProperties2(physicalDevice, format,
945                                                              pFormatProperties);
946                 },
947                 vk, physicalDevice, format, pFormatProperties);
948         } else if (hasInstanceExtension(instance,
949                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
950             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
951                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
952                      VkFormatProperties2* pFormatProperties) {
953                     vk->vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format,
954                                                                 pFormatProperties);
955                 },
956                 vk, physicalDevice, format, pFormatProperties);
957         } else {
958             // No instance extension, fake it!!!!
959             if (pFormatProperties->pNext) {
960                 fprintf(stderr,
961                         "%s: Warning: Trying to use extension struct in "
962                         "vkGetPhysicalDeviceFormatProperties2 without having "
963                         "enabled the extension!!!!11111\n",
964                         __func__);
965             }
966             pFormatProperties->sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
967             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
968                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
969                      VkFormatProperties* pFormatProperties) {
970                     vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format,
971                                                             pFormatProperties);
972                 },
973                 vk, physicalDevice, format, &pFormatProperties->formatProperties);
974         }
975     }
976 
977     void on_vkGetPhysicalDeviceProperties(android::base::BumpPool* pool,
978                                           VkPhysicalDevice boxed_physicalDevice,
979                                           VkPhysicalDeviceProperties* pProperties) {
980         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
981         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
982 
983         vk->vkGetPhysicalDeviceProperties(physicalDevice, pProperties);
984 
985         if (pProperties->apiVersion > kMaxSafeVersion) {
986             pProperties->apiVersion = kMaxSafeVersion;
987         }
988     }
989 
990     void on_vkGetPhysicalDeviceProperties2(android::base::BumpPool* pool,
991                                            VkPhysicalDevice boxed_physicalDevice,
992                                            VkPhysicalDeviceProperties2* pProperties) {
993         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
994         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
995 
996         std::lock_guard<std::recursive_mutex> lock(mLock);
997 
998         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
999         if (!physdevInfo) return;
1000 
1001         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1002         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1003         if (!instanceInfo) return;
1004 
1005         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1006             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1007             vk->vkGetPhysicalDeviceProperties2(physicalDevice, pProperties);
1008         } else if (hasInstanceExtension(instance,
1009                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1010             vk->vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
1011         } else {
1012             // No instance extension, fake it!!!!
1013             if (pProperties->pNext) {
1014                 fprintf(stderr,
1015                         "%s: Warning: Trying to use extension struct in "
1016                         "VkPhysicalDeviceProperties2 without having enabled "
1017                         "the extension!!!!11111\n",
1018                         __func__);
1019             }
1020             *pProperties = {
1021                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
1022                 0,
1023             };
1024             vk->vkGetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
1025         }
1026 
1027         if (pProperties->properties.apiVersion > kMaxSafeVersion) {
1028             pProperties->properties.apiVersion = kMaxSafeVersion;
1029         }
1030     }
1031 
1032     void on_vkGetPhysicalDeviceMemoryProperties(
1033         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1034         VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
1035         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1036         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1037 
1038         vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
1039 
1040         // Pick a max heap size that will work around
1041         // drivers that give bad suggestions (such as 0xFFFFFFFFFFFFFFFF for the heap size)
1042         // plus won't break the bank on 32-bit userspace.
1043         static constexpr VkDeviceSize kMaxSafeHeapSize = 2ULL * 1024ULL * 1024ULL * 1024ULL;
1044 
1045         for (uint32_t i = 0; i < pMemoryProperties->memoryTypeCount; ++i) {
1046             uint32_t heapIndex = pMemoryProperties->memoryTypes[i].heapIndex;
1047             auto& heap = pMemoryProperties->memoryHeaps[heapIndex];
1048 
1049             if (heap.size > kMaxSafeHeapSize) {
1050                 heap.size = kMaxSafeHeapSize;
1051             }
1052 
1053             if (!feature_is_enabled(kFeature_GLDirectMem) &&
1054                 !feature_is_enabled(kFeature_VirtioGpuNext)) {
1055                 pMemoryProperties->memoryTypes[i].propertyFlags =
1056                     pMemoryProperties->memoryTypes[i].propertyFlags &
1057                     ~(VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1058             }
1059         }
1060     }
1061 
1062     void on_vkGetPhysicalDeviceMemoryProperties2(
1063         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1064         VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
1065         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1066         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1067 
1068         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1069         if (!physdevInfo) return;
1070 
1071         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1072         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1073         if (!instanceInfo) return;
1074 
1075         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1076             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1077             vk->vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
1078         } else if (hasInstanceExtension(instance,
1079                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1080             vk->vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
1081         } else {
1082             // No instance extension, fake it!!!!
1083             if (pMemoryProperties->pNext) {
1084                 fprintf(stderr,
1085                         "%s: Warning: Trying to use extension struct in "
1086                         "VkPhysicalDeviceMemoryProperties2 without having enabled "
1087                         "the extension!!!!11111\n",
1088                         __func__);
1089             }
1090             *pMemoryProperties = {
1091                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
1092                 0,
1093             };
1094             vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice,
1095                                                     &pMemoryProperties->memoryProperties);
1096         }
1097 
1098         // Pick a max heap size that will work around
1099         // drivers that give bad suggestions (such as 0xFFFFFFFFFFFFFFFF for the heap size)
1100         // plus won't break the bank on 32-bit userspace.
1101         static constexpr VkDeviceSize kMaxSafeHeapSize = 2ULL * 1024ULL * 1024ULL * 1024ULL;
1102 
1103         for (uint32_t i = 0; i < pMemoryProperties->memoryProperties.memoryTypeCount; ++i) {
1104             uint32_t heapIndex = pMemoryProperties->memoryProperties.memoryTypes[i].heapIndex;
1105             auto& heap = pMemoryProperties->memoryProperties.memoryHeaps[heapIndex];
1106 
1107             if (heap.size > kMaxSafeHeapSize) {
1108                 heap.size = kMaxSafeHeapSize;
1109             }
1110 
1111             if (!feature_is_enabled(kFeature_GLDirectMem) &&
1112                 !feature_is_enabled(kFeature_VirtioGpuNext)) {
1113                 pMemoryProperties->memoryProperties.memoryTypes[i].propertyFlags =
1114                     pMemoryProperties->memoryProperties.memoryTypes[i].propertyFlags &
1115                     ~(VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1116             }
1117         }
1118     }
1119 
1120     VkResult on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool* pool,
1121                                                      VkPhysicalDevice boxed_physicalDevice,
1122                                                      const char* pLayerName,
1123                                                      uint32_t* pPropertyCount,
1124                                                      VkExtensionProperties* pProperties) {
1125         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1126         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1127 
1128         bool shouldPassthrough = !m_emu->enableYcbcrEmulation;
1129 #ifdef VK_MVK_moltenvk
1130         shouldPassthrough = shouldPassthrough && !m_emu->instanceSupportsMoltenVK;
1131 #endif
1132         if (shouldPassthrough) {
1133             return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
1134                                                             pPropertyCount, pProperties);
1135         }
1136 
1137         // If MoltenVK is supported on the host, or YCbCr emulation is enabled, make sure the
1138         // corresponding extension appears in the returned properties.
1139         std::vector<VkExtensionProperties> properties;
1140         VkResult result =
1141             enumerateDeviceExtensionProperties(vk, physicalDevice, pLayerName, properties);
1142         if (result != VK_SUCCESS) {
1143             return result;
1144         }
1145 
1146 #ifdef VK_MVK_moltenvk
1147         if (m_emu->instanceSupportsMoltenVK &&
1148             !hasDeviceExtension(properties, VK_MVK_MOLTENVK_EXTENSION_NAME)) {
1149             VkExtensionProperties mvk_props;
1150             strncpy(mvk_props.extensionName, VK_MVK_MOLTENVK_EXTENSION_NAME,
1151                     sizeof(mvk_props.extensionName));
1152             mvk_props.specVersion = VK_MVK_MOLTENVK_SPEC_VERSION;
1153             properties.push_back(mvk_props);
1154         }
1155 #endif
1156 
1157         if (m_emu->enableYcbcrEmulation &&
1158             !hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
1159             VkExtensionProperties ycbcr_props;
1160             strncpy(ycbcr_props.extensionName, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
1161                     sizeof(ycbcr_props.extensionName));
1162             ycbcr_props.specVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION;
1163             properties.push_back(ycbcr_props);
1164         }
1165         if (pProperties == nullptr) {
1166             *pPropertyCount = properties.size();
1167         } else {
1168             // Return the number of structures actually written to pProperties.
1169             *pPropertyCount = std::min((uint32_t)properties.size(), *pPropertyCount);
1170             memcpy(pProperties, properties.data(), *pPropertyCount * sizeof(VkExtensionProperties));
1171         }
1172         return *pPropertyCount < properties.size() ? VK_INCOMPLETE : VK_SUCCESS;
1173     }
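    // Guest-side usage follows the standard Vulkan two-call idiom (sketch only,
    // using the regular Vulkan entry points, not part of this file):
    //
    //   uint32_t count = 0;
    //   vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &count, nullptr);
    //   std::vector<VkExtensionProperties> exts(count);
    //   vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &count, exts.data());
    //
    // If the caller's array is too small, only *pPropertyCount entries are written
    // and VK_INCOMPLETE is returned, matching the handler above.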
1174 
1175     VkResult on_vkCreateDevice(android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1176                                const VkDeviceCreateInfo* pCreateInfo,
1177                                const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {
1178         if (mLogging) {
1179             fprintf(stderr, "%s: begin\n", __func__);
1180         }
1181 
1182         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1183         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1184 
1185         std::vector<const char*> finalExts =
1186             filteredDeviceExtensionNames(vk, physicalDevice, pCreateInfo->enabledExtensionCount,
1187                                          pCreateInfo->ppEnabledExtensionNames);
1188 
1189         // Run the underlying API call, filtering extensions.
1190         VkDeviceCreateInfo createInfoFiltered = *pCreateInfo;
1191         // According to the spec, it seems that the application can use compressed texture formats
1192         // without enabling the feature when creating the VkDevice, as long as
1193         // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties report
1194         // support: to query for additional properties, or if the feature is not enabled,
1195         // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties can be
1196         // used to check for supported properties of individual formats as normal.
1197         bool emulateTextureEtc2 = needEmulatedEtc2(physicalDevice, vk);
1198         bool emulateTextureAstc = needEmulatedAstc(physicalDevice, vk);
1199         VkPhysicalDeviceFeatures featuresFiltered;
1200         std::vector<VkPhysicalDeviceFeatures*> featuresToFilter;
1201 
1202         if (pCreateInfo->pEnabledFeatures) {
1203             featuresFiltered = *pCreateInfo->pEnabledFeatures;
1204             createInfoFiltered.pEnabledFeatures = &featuresFiltered;
1205             featuresToFilter.emplace_back(&featuresFiltered);
1206         }
1207 
1208         if (VkPhysicalDeviceFeatures2* features2 =
1209                 vk_find_struct<VkPhysicalDeviceFeatures2>(&createInfoFiltered)) {
1210             featuresToFilter.emplace_back(&features2->features);
1211         }
1212 
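        // When ETC2/ASTC support is emulated, the host driver does not actually expose
        // these features, so forwarding the guest's feature request as-is could make
        // vkCreateDevice fail; clear the bits and rely on the emulation path instead.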
1213         for (VkPhysicalDeviceFeatures* feature : featuresToFilter) {
1214             if (emulateTextureEtc2) {
1215                 feature->textureCompressionETC2 = VK_FALSE;
1216             }
1217             if (emulateTextureAstc) {
1218                 feature->textureCompressionASTC_LDR = VK_FALSE;
1219             }
1220         }
1221 
1222         if (auto* ycbcrFeatures = vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
1223                 &createInfoFiltered)) {
1224             if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
1225                 ycbcrFeatures->samplerYcbcrConversion = VK_FALSE;
1226             }
1227         }
1228 
1229         createInfoFiltered.enabledExtensionCount = (uint32_t)finalExts.size();
1230         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
1231 
1232         // bug: 155795731
1233         bool swiftshader =
1234             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
1235              0);
1236 
1237         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
1238 
1239         if (swiftshader) {
1240             if (mLogging) {
1241                 fprintf(stderr, "%s: acquire lock\n", __func__);
1242             }
1243             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1244         }
1245 
1246         if (mLogging) {
1247             fprintf(stderr, "%s: got lock, calling host\n", __func__);
1248         }
1249 
1250         VkResult result =
1251             vk->vkCreateDevice(physicalDevice, &createInfoFiltered, pAllocator, pDevice);
1252 
1253         if (mLogging) {
1254             fprintf(stderr, "%s: host returned. result: %d\n", __func__, result);
1255         }
1256 
1257         if (result != VK_SUCCESS) return result;
1258 
1259         if (mLogging) {
1260             fprintf(stderr, "%s: track the new device (begin)\n", __func__);
1261         }
1262 
1263         if (!swiftshader) {
1264             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1265         }
1266 
1267         mDeviceToPhysicalDevice[*pDevice] = physicalDevice;
1268 
1269         // Fill out information about the logical device here.
1270         auto& deviceInfo = mDeviceInfo[*pDevice];
1271         deviceInfo.physicalDevice = physicalDevice;
1272         deviceInfo.emulateTextureEtc2 = emulateTextureEtc2;
1273         deviceInfo.emulateTextureAstc = emulateTextureAstc;
1274 
1275         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
1276             deviceInfo.enabledExtensionNames.push_back(
1277                 createInfoFiltered.ppEnabledExtensionNames[i]);
1278         }
1279 
1280         // First, get the dispatch table.
1281         VkDevice boxed = new_boxed_VkDevice(*pDevice, nullptr, true /* own dispatch */);
1282 
1283         if (mLogging) {
1284             fprintf(stderr, "%s: init vulkan dispatch from device\n", __func__);
1285         }
1286 
1287         VulkanDispatch* dispatch = dispatch_VkDevice(boxed);
1288         init_vulkan_dispatch_from_device(vk, *pDevice, dispatch);
1289         if (m_emu->debugUtilsAvailableAndRequested) {
1290             deviceInfo.debugUtilsHelper = DebugUtilsHelper::withUtilsEnabled(*pDevice, dispatch);
1291         }
1292 
1293         deviceInfo.externalFencePool =
1294             std::make_unique<ExternalFencePool<VulkanDispatch>>(dispatch, *pDevice);
1295 
1296         if (mLogging) {
1297             fprintf(stderr, "%s: init vulkan dispatch from device (end)\n", __func__);
1298         }
1299 
1300         deviceInfo.boxed = boxed;
1301 
1302         // Next, get information about the queue families used by this device.
1303         std::unordered_map<uint32_t, uint32_t> queueFamilyIndexCounts;
1304         for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
1305             const auto& queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
1306             // Check only queues created with flags = 0 in VkDeviceQueueCreateInfo.
1307             auto flags = queueCreateInfo.flags;
1308             if (flags) continue;
1309             uint32_t queueFamilyIndex = queueCreateInfo.queueFamilyIndex;
1310             uint32_t queueCount = queueCreateInfo.queueCount;
1311             queueFamilyIndexCounts[queueFamilyIndex] = queueCount;
1312         }
1313 
1314         for (auto it : queueFamilyIndexCounts) {
1315             auto index = it.first;
1316             auto count = it.second;
1317             auto& queues = deviceInfo.queues[index];
1318             for (uint32_t i = 0; i < count; ++i) {
1319                 VkQueue queueOut;
1320 
1321                 if (mLogging) {
1322                     fprintf(stderr, "%s: get device queue (begin)\n", __func__);
1323                 }
1324 
1325                 vk->vkGetDeviceQueue(*pDevice, index, i, &queueOut);
1326 
1327                 if (mLogging) {
1328                     fprintf(stderr, "%s: get device queue (end)\n", __func__);
1329                 }
1330                 queues.push_back(queueOut);
1331                 mQueueInfo[queueOut].device = *pDevice;
1332                 mQueueInfo[queueOut].queueFamilyIndex = index;
1333 
1334                 auto boxed = new_boxed_VkQueue(queueOut, dispatch_VkDevice(deviceInfo.boxed),
1335                                                false /* does not own dispatch */);
1336                 mQueueInfo[queueOut].boxed = boxed;
1337                 mQueueInfo[queueOut].lock = new Lock;
1338             }
1339         }
1340 
1341         // Box the device.
1342         *pDevice = (VkDevice)deviceInfo.boxed;
1343 
1344         if (mLogging) {
1345             fprintf(stderr, "%s: (end)\n", __func__);
1346         }
1347 
1348         return VK_SUCCESS;
1349     }
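    // Note: the VkDevice written back to the guest above is a boxed handle; subsequent
    // on_vk* entry points unbox it (unbox_VkDevice) and fetch the per-device dispatch
    // table (dispatch_VkDevice) before calling into the real driver.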
1350 
1351     void on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice boxed_device,
1352                              uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {
1353         auto device = unbox_VkDevice(boxed_device);
1354 
1355         std::lock_guard<std::recursive_mutex> lock(mLock);
1356 
1357         *pQueue = VK_NULL_HANDLE;
1358 
1359         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1360         if (!deviceInfo) return;
1361 
1362         const auto& queues = deviceInfo->queues;
1363 
1364         const auto* queueList = android::base::find(queues, queueFamilyIndex);
1365         if (!queueList) return;
1366         if (queueIndex >= queueList->size()) return;
1367 
1368         VkQueue unboxedQueue = (*queueList)[queueIndex];
1369 
1370         auto* queueInfo = android::base::find(mQueueInfo, unboxedQueue);
1371         if (!queueInfo) return;
1372 
1373         *pQueue = (VkQueue)queueInfo->boxed;
1374     }
1375 
1376     void on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice boxed_device,
1377                               const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {
1378         // Protected memory is not supported on emulators. So we should
1379         // not return any queue if a client requests a protected device
1380         // queue.
1381         if (pQueueInfo->flags & VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT) {
1382             *pQueue = VK_NULL_HANDLE;
1383             fprintf(stderr, "%s: Cannot get protected Vulkan device queue\n", __func__);
1384             return;
1385         }
1386         uint32_t queueFamilyIndex = pQueueInfo->queueFamilyIndex;
1387         uint32_t queueIndex = pQueueInfo->queueIndex;
1388         on_vkGetDeviceQueue(pool, boxed_device, queueFamilyIndex, queueIndex, pQueue);
1389     }
1390 
1391     void destroyDeviceLocked(VkDevice device, const VkAllocationCallbacks* pAllocator) {
1392         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1393         if (!deviceInfo) return;
1394 
1395         auto eraseIt = mQueueInfo.begin();
1396         for (; eraseIt != mQueueInfo.end();) {
1397             if (eraseIt->second.device == device) {
1398                 delete eraseIt->second.lock;
1399                 delete_VkQueue(eraseIt->second.boxed);
1400                 eraseIt = mQueueInfo.erase(eraseIt);
1401             } else {
1402                 ++eraseIt;
1403             }
1404         }
1405 
1406         VulkanDispatch* deviceDispatch = dispatch_VkDevice(deviceInfo->boxed);
1407 
1408         // Destroy pooled external fences
1409         auto deviceFences = deviceInfo->externalFencePool->popAll();
1410         for (auto fence : deviceFences) {
1411             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
1412             mFenceInfo.erase(fence);
1413         }
1414 
1415         for (auto fence : findDeviceObjects(device, mFenceInfo)) {
1416             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
1417             mFenceInfo.erase(fence);
1418         }
1419 
1420         // Run the underlying API call.
1421         m_vk->vkDestroyDevice(device, pAllocator);
1422 
1423         delete_VkDevice(deviceInfo->boxed);
1424     }
1425 
1426     void on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice boxed_device,
1427                             const VkAllocationCallbacks* pAllocator) {
1428         auto device = unbox_VkDevice(boxed_device);
1429 
1430         std::lock_guard<std::recursive_mutex> lock(mLock);
1431 
1432         sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
1433         destroyDeviceLocked(device, pAllocator);
1434 
1435         mDeviceInfo.erase(device);
1436         mDeviceToPhysicalDevice.erase(device);
1437     }
1438 
1439     VkResult on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice boxed_device,
1440                                const VkBufferCreateInfo* pCreateInfo,
1441                                const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {
1442         auto device = unbox_VkDevice(boxed_device);
1443         auto vk = dispatch_VkDevice(boxed_device);
1444 
1445         VkResult result = vk->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1446 
1447         if (result == VK_SUCCESS) {
1448             std::lock_guard<std::recursive_mutex> lock(mLock);
1449             auto& bufInfo = mBufferInfo[*pBuffer];
1450             bufInfo.device = device;
1451             bufInfo.size = pCreateInfo->size;
1452             *pBuffer = new_boxed_non_dispatchable_VkBuffer(*pBuffer);
1453         }
1454 
1455         return result;
1456     }
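    // Most create/destroy handlers in this file follow the same shape as
    // on_vkCreateBuffer above. A minimal sketch of the pattern ("Thing" is a
    // hypothetical object type, shown for illustration only):
    //
    //   VkResult on_vkCreateThing(..., VkThing* pThing) {
    //       auto device = unbox_VkDevice(boxed_device);    // guest handle -> host handle
    //       auto vk = dispatch_VkDevice(boxed_device);     // per-device dispatch table
    //       VkResult res = vk->vkCreateThing(device, pCreateInfo, pAllocator, pThing);
    //       if (res == VK_SUCCESS) {
    //           std::lock_guard<std::recursive_mutex> lock(mLock);
    //           mThingInfo[*pThing].device = device;        // host-side bookkeeping
    //           *pThing = new_boxed_non_dispatchable_VkThing(*pThing);  // re-box for guest
    //       }
    //       return res;
    //   }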
1457 
1458     void on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice boxed_device, VkBuffer buffer,
1459                             const VkAllocationCallbacks* pAllocator) {
1460         auto device = unbox_VkDevice(boxed_device);
1461         auto vk = dispatch_VkDevice(boxed_device);
1462 
1463         vk->vkDestroyBuffer(device, buffer, pAllocator);
1464 
1465         std::lock_guard<std::recursive_mutex> lock(mLock);
1466         mBufferInfo.erase(buffer);
1467     }
1468 
1469     void setBufferMemoryBindInfoLocked(VkBuffer buffer, VkDeviceMemory memory,
1470                                        VkDeviceSize memoryOffset) {
1471         auto* bufferInfo = android::base::find(mBufferInfo, buffer);
1472         if (!bufferInfo) return;
1473         bufferInfo->memory = memory;
1474         bufferInfo->memoryOffset = memoryOffset;
1475     }
1476 
1477     VkResult on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice boxed_device,
1478                                    VkBuffer buffer, VkDeviceMemory memory,
1479                                    VkDeviceSize memoryOffset) {
1480         auto device = unbox_VkDevice(boxed_device);
1481         auto vk = dispatch_VkDevice(boxed_device);
1482 
1483         VALIDATE_REQUIRED_HANDLE(memory);
1484         VkResult result = vk->vkBindBufferMemory(device, buffer, memory, memoryOffset);
1485 
1486         if (result == VK_SUCCESS) {
1487             std::lock_guard<std::recursive_mutex> lock(mLock);
1488             setBufferMemoryBindInfoLocked(buffer, memory, memoryOffset);
1489         }
1490         return result;
1491     }
1492 
1493     VkResult on_vkBindBufferMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
1494                                     uint32_t bindInfoCount,
1495                                     const VkBindBufferMemoryInfo* pBindInfos) {
1496         auto device = unbox_VkDevice(boxed_device);
1497         auto vk = dispatch_VkDevice(boxed_device);
1498 
1499         for (uint32_t i = 0; i < bindInfoCount; ++i) {
1500             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
1501         }
1502         VkResult result = vk->vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
1503 
1504         if (result == VK_SUCCESS) {
1505             std::lock_guard<std::recursive_mutex> lock(mLock);
1506             for (uint32_t i = 0; i < bindInfoCount; ++i) {
1507                 setBufferMemoryBindInfoLocked(pBindInfos[i].buffer, pBindInfos[i].memory,
1508                                               pBindInfos[i].memoryOffset);
1509             }
1510         }
1511 
1512         return result;
1513     }
1514 
1515     VkResult on_vkBindBufferMemory2KHR(android::base::BumpPool* pool, VkDevice boxed_device,
1516                                        uint32_t bindInfoCount,
1517                                        const VkBindBufferMemoryInfo* pBindInfos) {
1518         auto device = unbox_VkDevice(boxed_device);
1519         auto vk = dispatch_VkDevice(boxed_device);
1520 
1521         for (uint32_t i = 0; i < bindInfoCount; ++i) {
1522             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
1523         }
1524         VkResult result = vk->vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
1525 
1526         if (result == VK_SUCCESS) {
1527             std::lock_guard<std::recursive_mutex> lock(mLock);
1528             for (uint32_t i = 0; i < bindInfoCount; ++i) {
1529                 setBufferMemoryBindInfoLocked(pBindInfos[i].buffer, pBindInfos[i].memory,
1530                                               pBindInfos[i].memoryOffset);
1531             }
1532         }
1533 
1534         return result;
1535     }
1536 
1537     VkResult on_vkCreateImage(android::base::BumpPool* pool, VkDevice boxed_device,
1538                               const VkImageCreateInfo* pCreateInfo,
1539                               const VkAllocationCallbacks* pAllocator, VkImage* pImage) {
1540         auto device = unbox_VkDevice(boxed_device);
1541         auto vk = dispatch_VkDevice(boxed_device);
1542 
1543         std::lock_guard<std::recursive_mutex> lock(mLock);
1544 
1545         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1546         if (!deviceInfo) {
1547             return VK_ERROR_OUT_OF_HOST_MEMORY;
1548         }
1549 
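        // If the driver cannot sample this compressed format natively, the image is
        // created with an uncompressed (decompressed) format instead, while per-mip
        // images keep the original compressed data (see CompressedImageInfo).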
1550         const bool needDecompression = deviceInfo->needEmulatedDecompression(pCreateInfo->format);
1551         CompressedImageInfo cmpInfo = needDecompression ? CompressedImageInfo(device, *pCreateInfo)
1552                                                         : CompressedImageInfo(device);
1553         VkImageCreateInfo decompInfo;
1554         if (needDecompression) {
1555             decompInfo = cmpInfo.getDecompressedCreateInfo(*pCreateInfo);
1556             pCreateInfo = &decompInfo;
1557         }
1558 
1559         auto anbInfo = std::make_unique<AndroidNativeBufferInfo>();
1560         const VkNativeBufferANDROID* nativeBufferANDROID =
1561             vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
1562 
1563         VkResult createRes = VK_SUCCESS;
1564 
1565         if (nativeBufferANDROID) {
1566             auto memProps = memPropsOfDeviceLocked(device);
1567 
1568             createRes =
1569                 prepareAndroidNativeBufferImage(vk, device, *pool, pCreateInfo, nativeBufferANDROID,
1570                                                 pAllocator, memProps, anbInfo.get());
1571             if (createRes == VK_SUCCESS) {
1572                 *pImage = anbInfo->image;
1573             }
1574         } else {
1575             createRes = vk->vkCreateImage(device, pCreateInfo, pAllocator, pImage);
1576         }
1577 
1578         if (createRes != VK_SUCCESS) return createRes;
1579 
1580         if (needDecompression) {
1581             cmpInfo.setDecompressedImage(*pImage);
1582             cmpInfo.createCompressedMipmapImages(vk, *pCreateInfo);
1583 
1584             if (cmpInfo.isAstc()) {
1585                 VkInstance* instance = deviceToInstanceLocked(device);
1586                 InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
1587                 if (instanceInfo && instanceInfo->useAstcCpuDecompression) {
1588                     cmpInfo.initAstcCpuDecompression(m_vk, mDeviceInfo[device].physicalDevice);
1589                 }
1590             }
1591         }
1592 
1593         auto& imageInfo = mImageInfo[*pImage];
1594         imageInfo.device = device;
1595         imageInfo.cmpInfo = std::move(cmpInfo);
1596         if (nativeBufferANDROID) imageInfo.anbInfo = std::move(anbInfo);
1597 
1598         *pImage = new_boxed_non_dispatchable_VkImage(*pImage);
1599         return createRes;
1600     }
1601 
1602     void destroyImageLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkImage image,
1603                             const VkAllocationCallbacks* pAllocator) {
1604         auto* imageInfo = android::base::find(mImageInfo, image);
1605         if (!imageInfo) return;
1606 
1607         if (!imageInfo->anbInfo) {
1608             imageInfo->cmpInfo.destroy(deviceDispatch);
1609             if (image != imageInfo->cmpInfo.decompressedImage()) {
1610                 deviceDispatch->vkDestroyImage(device, image, pAllocator);
1611             }
1612         }
1613         mImageInfo.erase(image);
1614     }
1615 
1616     void on_vkDestroyImage(android::base::BumpPool* pool, VkDevice boxed_device, VkImage image,
1617                            const VkAllocationCallbacks* pAllocator) {
1618         auto device = unbox_VkDevice(boxed_device);
1619         auto deviceDispatch = dispatch_VkDevice(boxed_device);
1620 
1621         std::lock_guard<std::recursive_mutex> lock(mLock);
1622         destroyImageLocked(device, deviceDispatch, image, pAllocator);
1623     }
1624 
1625     VkResult performBindImageMemoryDeferredAhb(android::base::BumpPool* pool,
1626                                                VkDevice boxed_device,
1627                                                const VkBindImageMemoryInfo* bimi) {
1628         auto device = unbox_VkDevice(boxed_device);
1629         auto vk = dispatch_VkDevice(boxed_device);
1630 
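        // Deferred AHB bind: the image was created before its gralloc buffer existed,
        // so this bind carries a VkNativeBufferANDROID and VK_NULL_HANDLE memory.
        // Recreate the image against that buffer and splice the new underlying handle
        // into the guest's original boxed VkImage below.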
1631         auto original_underlying_image = bimi->image;
1632         auto original_boxed_image = unboxed_to_boxed_non_dispatchable_VkImage(original_underlying_image);
1633 
1634         VkImageCreateInfo ici = {};
1635         {
1636             std::lock_guard<std::recursive_mutex> lock(mLock);
1637 
1638             auto* imageInfo = android::base::find(mImageInfo, original_underlying_image);
1639             if (!imageInfo) {
1640                 ERR("Image for deferred AHB bind does not exist.");
1641                 return VK_ERROR_OUT_OF_HOST_MEMORY;
1642             }
1643 
1644             ici = imageInfo->imageCreateInfoShallow;
1645         }
1646 
1647         ici.pNext = vk_find_struct<VkNativeBufferANDROID>(bimi);
1648         if (!ici.pNext) {
1649             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
1650                 << "Missing VkNativeBufferANDROID for deferred AHB bind.";
1651         }
1652 
1653         VkImage boxed_replacement_image = VK_NULL_HANDLE;
1654         VkResult result = on_vkCreateImage(pool, boxed_device, &ici, nullptr, &boxed_replacement_image);
1655         if (result != VK_SUCCESS) {
1656             ERR("Failed to create image for deferred AHB bind.");
1657             return VK_ERROR_OUT_OF_HOST_MEMORY;
1658         }
1659 
1660         on_vkDestroyImage(pool, boxed_device, original_underlying_image, nullptr);
1661 
1662         {
1663             std::lock_guard<std::recursive_mutex> lock(mLock);
1664 
1665             auto underlying_replacement_image = unbox_VkImage(boxed_replacement_image);
1666             delete_VkImage(boxed_replacement_image);
1667             set_boxed_non_dispatchable_VkImage(original_boxed_image, underlying_replacement_image);
1668         }
1669 
1670         return VK_SUCCESS;
1671     }
1672 
1673     VkResult performBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
1674                                     const VkBindImageMemoryInfo* bimi) {
1675         auto image = bimi->image;
1676         auto memory = bimi->memory;
1677         auto memoryOffset = bimi->memoryOffset;
1678 
1679         const auto* anb = vk_find_struct<VkNativeBufferANDROID>(bimi);
1680         if (memory == VK_NULL_HANDLE && anb != nullptr) {
1681             return performBindImageMemoryDeferredAhb(pool, boxed_device, bimi);
1682         }
1683         auto device = unbox_VkDevice(boxed_device);
1684         auto vk = dispatch_VkDevice(boxed_device);
1685 
1686         VALIDATE_REQUIRED_HANDLE(memory);
1687         VkResult result = vk->vkBindImageMemory(device, image, memory, memoryOffset);
1688         if (result != VK_SUCCESS) {
1689             return result;
1690         }
1691         std::lock_guard<std::recursive_mutex> lock(mLock);
1692         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1693         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1694         auto* memoryInfo = android::base::find(mMemoryInfo, memory);
1695         if (!memoryInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1696 #ifdef VK_MVK_moltenvk
1697         if (memoryInfo->mtlTexture) {
1698             result = m_vk->vkSetMTLTextureMVK(image, memoryInfo->mtlTexture);
1699             if (result != VK_SUCCESS) {
1700                 fprintf(stderr, "vkSetMTLTexture failed\n");
1701                 return VK_ERROR_OUT_OF_HOST_MEMORY;
1702             }
1703         }
1704 #endif
1705         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
1706             return VK_SUCCESS;
1707         }
1708         auto* imageInfo = android::base::find(mImageInfo, image);
1709         if (!imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1710         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
1711         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
1712             return VK_SUCCESS;
1713         }
1714         return cmpInfo.bindCompressedMipmapsMemory(vk, memory, memoryOffset);
1715     }
1716 
1717     VkResult on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
1718                                   VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
1719         const VkBindImageMemoryInfo bimi = {
1720             .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
1721             .pNext = nullptr,
1722             .image = image,
1723             .memory = memory,
1724             .memoryOffset = memoryOffset,
1725         };
1726         return performBindImageMemory(pool, boxed_device, &bimi);
1727     }
1728 
1729     VkResult on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
1730                                    uint32_t bindInfoCount,
1731                                    const VkBindImageMemoryInfo* pBindInfos) {
1732         auto device = unbox_VkDevice(boxed_device);
1733         auto vk = dispatch_VkDevice(boxed_device);
1734         bool needEmulation = false;
1735 
1736         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1737         if (!deviceInfo) return VK_ERROR_UNKNOWN;
1738 
1739         for (uint32_t i = 0; i < bindInfoCount; i++) {
1740             auto* imageInfo = android::base::find(mImageInfo, pBindInfos[i].image);
1741             if (!imageInfo) return VK_ERROR_UNKNOWN;
1742 
1743             const auto* anb = vk_find_struct<VkNativeBufferANDROID>(&pBindInfos[i]);
1744             if (anb != nullptr) {
1745                 needEmulation = true;
1746                 break;
1747             }
1748 
1749             if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
1750                 needEmulation = true;
1751                 break;
1752             }
1753         }
1754 
1755         if (needEmulation) {
1756             VkResult result;
1757             for (uint32_t i = 0; i < bindInfoCount; i++) {
1758                 result = performBindImageMemory(pool, boxed_device, &pBindInfos[i]);
1759 
1760                 if (result != VK_SUCCESS) return result;
1761             }
1762 
1763             return VK_SUCCESS;
1764         }
1765 
1766         return vk->vkBindImageMemory2(device, bindInfoCount, pBindInfos);
1767     }
1768 
1769     VkResult on_vkCreateImageView(android::base::BumpPool* pool, VkDevice boxed_device,
1770                                   const VkImageViewCreateInfo* pCreateInfo,
1771                                   const VkAllocationCallbacks* pAllocator, VkImageView* pView) {
1772         auto device = unbox_VkDevice(boxed_device);
1773         auto vk = dispatch_VkDevice(boxed_device);
1774 
1775         if (!pCreateInfo) {
1776             return VK_ERROR_OUT_OF_HOST_MEMORY;
1777         }
1778 
1779         std::lock_guard<std::recursive_mutex> lock(mLock);
1780         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1781         auto* imageInfo = android::base::find(mImageInfo, pCreateInfo->image);
1782         if (!deviceInfo || !imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1783         VkImageViewCreateInfo createInfo;
1784         bool needEmulatedAlpha = false;
1785         if (deviceInfo->needEmulatedDecompression(pCreateInfo->format)) {
1786             if (imageInfo->cmpInfo.decompressedImage()) {
1787                 createInfo = *pCreateInfo;
1788                 createInfo.format = CompressedImageInfo::getDecompressedFormat(pCreateInfo->format);
1789                 needEmulatedAlpha = CompressedImageInfo::needEmulatedAlpha(pCreateInfo->format);
1790                 createInfo.image = imageInfo->cmpInfo.decompressedImage();
1791                 pCreateInfo = &createInfo;
1792             }
1793         } else if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
1794             // Image view on the compressed mipmaps
1795             createInfo = *pCreateInfo;
1796             createInfo.format =
1797                 CompressedImageInfo::getCompressedMipmapsFormat(pCreateInfo->format);
1798             needEmulatedAlpha = false;
1799             createInfo.image =
1800                 imageInfo->cmpInfo.compressedMipmap(pCreateInfo->subresourceRange.baseMipLevel);
1801             createInfo.subresourceRange.baseMipLevel = 0;
1802             pCreateInfo = &createInfo;
1803         }
1804         if (imageInfo->anbInfo && imageInfo->anbInfo->externallyBacked) {
1805             createInfo = *pCreateInfo;
1806             pCreateInfo = &createInfo;
1807         }
1808 
1809         VkResult result = vk->vkCreateImageView(device, pCreateInfo, pAllocator, pView);
1810         if (result != VK_SUCCESS) {
1811             return result;
1812         }
1813 
1814         auto& imageViewInfo = mImageViewInfo[*pView];
1815         imageViewInfo.device = device;
1816         imageViewInfo.needEmulatedAlpha = needEmulatedAlpha;
1817 
1818         *pView = new_boxed_non_dispatchable_VkImageView(*pView);
1819 
1820         return result;
1821     }
1822 
1823     void on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice boxed_device,
1824                                VkImageView imageView, const VkAllocationCallbacks* pAllocator) {
1825         auto device = unbox_VkDevice(boxed_device);
1826         auto vk = dispatch_VkDevice(boxed_device);
1827 
1828         vk->vkDestroyImageView(device, imageView, pAllocator);
1829         std::lock_guard<std::recursive_mutex> lock(mLock);
1830         mImageViewInfo.erase(imageView);
1831     }
1832 
1833     VkResult on_vkCreateSampler(android::base::BumpPool* pool, VkDevice boxed_device,
1834                                 const VkSamplerCreateInfo* pCreateInfo,
1835                                 const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {
1836         auto device = unbox_VkDevice(boxed_device);
1837         auto vk = dispatch_VkDevice(boxed_device);
1838         VkResult result = vk->vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
1839         if (result != VK_SUCCESS) {
1840             return result;
1841         }
1842         std::lock_guard<std::recursive_mutex> lock(mLock);
1843         auto& samplerInfo = mSamplerInfo[*pSampler];
1844         samplerInfo.device = device;
1845         deepcopy_VkSamplerCreateInfo(&samplerInfo.pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
1846                                      pCreateInfo, &samplerInfo.createInfo);
1847         // We emulate RGB with RGBA for some compressed textures, which does not
1848         // handle transparent borders correctly.
1849         samplerInfo.needEmulatedAlpha =
1850             (pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
1851              pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
1852              pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) &&
1853             (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
1854              pCreateInfo->borderColor == VK_BORDER_COLOR_INT_TRANSPARENT_BLACK ||
1855              pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT ||
1856              pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT);
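        // When needEmulatedAlpha is set, a substitute sampler with an opaque border
        // (samplerInfo.emulatedborderSampler, cleaned up in destroySamplerLocked below)
        // is expected to be created on demand where the sampler is actually used.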
1857 
1858         *pSampler = new_boxed_non_dispatchable_VkSampler(*pSampler);
1859 
1860         return result;
1861     }
1862 
1863     void destroySamplerLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkSampler sampler,
1864                               const VkAllocationCallbacks* pAllocator) {
1865         deviceDispatch->vkDestroySampler(device, sampler, pAllocator);
1866 
1867         auto* samplerInfo = android::base::find(mSamplerInfo, sampler);
1868         if (!samplerInfo) return;
1869 
1870         if (samplerInfo->emulatedborderSampler != VK_NULL_HANDLE) {
1871             deviceDispatch->vkDestroySampler(device, samplerInfo->emulatedborderSampler, nullptr);
1872         }
1873         mSamplerInfo.erase(sampler);
1874     }
1875 
1876     void on_vkDestroySampler(android::base::BumpPool* pool, VkDevice boxed_device,
1877                              VkSampler sampler, const VkAllocationCallbacks* pAllocator) {
1878         auto device = unbox_VkDevice(boxed_device);
1879         auto deviceDispatch = dispatch_VkDevice(boxed_device);
1880 
1881         std::lock_guard<std::recursive_mutex> lock(mLock);
1882         destroySamplerLocked(device, deviceDispatch, sampler, pAllocator);
1883     }
1884 
1885     VkResult on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
1886                                   const VkSemaphoreCreateInfo* pCreateInfo,
1887                                   const VkAllocationCallbacks* pAllocator,
1888                                   VkSemaphore* pSemaphore) {
1889         auto device = unbox_VkDevice(boxed_device);
1890         auto vk = dispatch_VkDevice(boxed_device);
1891 
1892         VkSemaphoreCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
1893         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
1894 
1895         VkSemaphoreTypeCreateInfoKHR localSemaphoreTypeCreateInfo;
1896         if (const VkSemaphoreTypeCreateInfoKHR* semaphoreTypeCiPtr =
1897                 vk_find_struct<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo);
1898             semaphoreTypeCiPtr) {
1899             localSemaphoreTypeCreateInfo = vk_make_orphan_copy(*semaphoreTypeCiPtr);
1900             vk_append_struct(&structChainIter, &localSemaphoreTypeCreateInfo);
1901         }
1902 
1903         const VkExportSemaphoreCreateInfoKHR* exportCiPtr =
1904             vk_find_struct<VkExportSemaphoreCreateInfoKHR>(pCreateInfo);
1905         VkExportSemaphoreCreateInfoKHR localSemaphoreCreateInfo;
1906 
1907         if (exportCiPtr) {
1908             localSemaphoreCreateInfo = vk_make_orphan_copy(*exportCiPtr);
1909 
1910 #ifdef _WIN32
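            // POSIX fd-based external semaphore handle types do not exist on Windows,
            // so any export request is rewritten to the opaque Win32 handle type.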
1911             if (localSemaphoreCreateInfo.handleTypes) {
1912                 localSemaphoreCreateInfo.handleTypes =
1913                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
1914             }
1915 #endif
1916 
1917             vk_append_struct(&structChainIter, &localSemaphoreCreateInfo);
1918         }
1919 
1920         VkResult res = vk->vkCreateSemaphore(device, &localCreateInfo, pAllocator, pSemaphore);
1921 
1922         if (res != VK_SUCCESS) return res;
1923 
1924         std::lock_guard<std::recursive_mutex> lock(mLock);
1925 
1926         auto& semaphoreInfo = mSemaphoreInfo[*pSemaphore];
1927         semaphoreInfo.device = device;
1928 
1929         *pSemaphore = new_boxed_non_dispatchable_VkSemaphore(*pSemaphore);
1930 
1931         return res;
1932     }
1933 
1934     VkResult on_vkCreateFence(android::base::BumpPool* pool, VkDevice boxed_device,
1935                               const VkFenceCreateInfo* pCreateInfo,
1936                               const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
1937         auto device = unbox_VkDevice(boxed_device);
1938         auto vk = dispatch_VkDevice(boxed_device);
1939 
1940         VkFenceCreateInfo& createInfo = const_cast<VkFenceCreateInfo&>(*pCreateInfo);
1941 
1942         const VkExportFenceCreateInfo* exportFenceInfoPtr =
1943             vk_find_struct<VkExportFenceCreateInfo>(pCreateInfo);
1944         bool exportSyncFd = exportFenceInfoPtr && (exportFenceInfoPtr->handleTypes &
1945                                                    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
1946         bool fenceReused = false;
1947 
1948         *pFence = VK_NULL_HANDLE;
1949 
1950         if (exportSyncFd) {
1951             // Remove VkExportFenceCreateInfo, since the host doesn't need to create
1952             // an exportable fence in this case.
1953             ExternalFencePool<VulkanDispatch>* externalFencePool = nullptr;
1954             vk_struct_chain_remove(exportFenceInfoPtr, &createInfo);
1955             {
1956                 std::lock_guard<std::recursive_mutex> lock(mLock);
1957                 auto* deviceInfo = android::base::find(mDeviceInfo, device);
1958                 if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
1959                 externalFencePool = deviceInfo->externalFencePool.get();
1960             }
1961             *pFence = externalFencePool->pop(pCreateInfo);
1962             if (*pFence != VK_NULL_HANDLE) {
1963                 fenceReused = true;
1964             }
1965         }
1966 
1967         if (*pFence == VK_NULL_HANDLE) {
1968             VkResult res = vk->vkCreateFence(device, &createInfo, pAllocator, pFence);
1969             if (res != VK_SUCCESS) {
1970                 return res;
1971             }
1972         }
1973 
1974         {
1975             std::lock_guard<std::recursive_mutex> lock(mLock);
1976 
1977             DCHECK(fenceReused || mFenceInfo.find(*pFence) == mFenceInfo.end());
1978             // Create FenceInfo for *pFence.
1979             auto& fenceInfo = mFenceInfo[*pFence];
1980             fenceInfo.device = device;
1981             fenceInfo.vk = vk;
1982 
1983             *pFence = new_boxed_non_dispatchable_VkFence(*pFence);
1984             fenceInfo.boxed = *pFence;
1985             fenceInfo.external = exportSyncFd;
1986             fenceInfo.state = FenceInfo::State::kNotWaitable;
1987         }
1988 
1989         return VK_SUCCESS;
1990     }
1991 
1992     VkResult on_vkResetFences(android::base::BumpPool* pool, VkDevice boxed_device,
1993                               uint32_t fenceCount, const VkFence* pFences) {
1994         auto device = unbox_VkDevice(boxed_device);
1995         auto vk = dispatch_VkDevice(boxed_device);
1996 
1997         std::vector<VkFence> cleanedFences;
1998         std::vector<VkFence> externalFences;
1999 
2000         {
2001             std::lock_guard<std::recursive_mutex> lock(mLock);
2002             for (uint32_t i = 0; i < fenceCount; i++) {
2003                 if (pFences[i] == VK_NULL_HANDLE) continue;
2004 
2005                 DCHECK(mFenceInfo.find(pFences[i]) != mFenceInfo.end());
2006                 if (mFenceInfo[pFences[i]].external) {
2007                     externalFences.push_back(pFences[i]);
2008                 } else {
2009                     // Reset all fences' states to kNotWaitable.
2010                     cleanedFences.push_back(pFences[i]);
2011                     mFenceInfo[pFences[i]].state = FenceInfo::State::kNotWaitable;
2012                 }
2013             }
2014         }
2015 
2016         VK_CHECK(vk->vkResetFences(device, (uint32_t)cleanedFences.size(), cleanedFences.data()));
2017 
2018         // For external fences, we unilaterally put them in the pool to ensure they finish
2019         // TODO: should store creation info / pNext chain per fence and re-apply?
2020         VkFenceCreateInfo createInfo{
2021             .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, .pNext = 0, .flags = 0};
2022         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2023         if (!deviceInfo) return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2024         for (auto fence : externalFences) {
2025             VkFence replacement = deviceInfo->externalFencePool->pop(&createInfo);
2026             if (replacement == VK_NULL_HANDLE) {
2027                 VK_CHECK(vk->vkCreateFence(device, &createInfo, 0, &replacement));
2028             }
2029             deviceInfo->externalFencePool->add(fence);
2030 
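            // Park the old fence in the external fence pool (an exported sync fd may
            // still reference it) and re-point the guest's boxed handle at the fresh,
            // unsignaled replacement fence.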
2031             {
2032                 std::lock_guard<std::recursive_mutex> lock(mLock);
2033                 auto boxed_fence = unboxed_to_boxed_non_dispatchable_VkFence(fence);
2034                 delete_VkFence(boxed_fence);
2035                 set_boxed_non_dispatchable_VkFence(boxed_fence, replacement);
2036 
2037                 auto& fenceInfo = mFenceInfo[replacement];
2038                 fenceInfo.device = device;
2039                 fenceInfo.vk = vk;
2040                 fenceInfo.boxed = boxed_fence;
2041                 fenceInfo.external = true;
2042                 fenceInfo.state = FenceInfo::State::kNotWaitable;
2043 
2044                 mFenceInfo[fence].boxed = VK_NULL_HANDLE;
2045             }
2046         }
2047 
2048         return VK_SUCCESS;
2049     }
2050 
2051     VkResult on_vkImportSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
2052                                        const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
2053         auto device = unbox_VkDevice(boxed_device);
2054         auto vk = dispatch_VkDevice(boxed_device);
2055 
2056 #ifdef _WIN32
2057         std::lock_guard<std::recursive_mutex> lock(mLock);
2058 
2059         auto* infoPtr = android::base::find(mSemaphoreInfo,
2060                                             mExternalSemaphoresById[pImportSemaphoreFdInfo->fd]);
2061 
2062         if (!infoPtr) {
2063             return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2064         }
2065 
2066         VK_EXT_MEMORY_HANDLE handle = dupExternalMemory(infoPtr->externalHandle);
2067 
2068         VkImportSemaphoreWin32HandleInfoKHR win32ImportInfo = {
2069             VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
2070             0,
2071             pImportSemaphoreFdInfo->semaphore,
2072             pImportSemaphoreFdInfo->flags,
2073             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
2074             handle,
2075             L"",
2076         };
2077 
2078         return vk->vkImportSemaphoreWin32HandleKHR(device, &win32ImportInfo);
2079 #else
2080         VkImportSemaphoreFdInfoKHR importInfo = *pImportSemaphoreFdInfo;
2081         importInfo.fd = dup(pImportSemaphoreFdInfo->fd);
2082         return vk->vkImportSemaphoreFdKHR(device, &importInfo);
2083 #endif
2084     }
2085 
2086     VkResult on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
2087                                     const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {
2088         auto device = unbox_VkDevice(boxed_device);
2089         auto vk = dispatch_VkDevice(boxed_device);
2090 #ifdef _WIN32
2091         VkSemaphoreGetWin32HandleInfoKHR getWin32 = {
2092             VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
2093             0,
2094             pGetFdInfo->semaphore,
2095             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
2096         };
2097         VK_EXT_MEMORY_HANDLE handle;
2098         VkResult result = vk->vkGetSemaphoreWin32HandleKHR(device, &getWin32, &handle);
2099         if (result != VK_SUCCESS) {
2100             return result;
2101         }
2102         std::lock_guard<std::recursive_mutex> lock(mLock);
2103         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = handle;
2104         int nextId = genSemaphoreId();
2105         mExternalSemaphoresById[nextId] = pGetFdInfo->semaphore;
2106         *pFd = nextId;
2107 #else
2108         VkResult result = vk->vkGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
2109         if (result != VK_SUCCESS) {
2110             return result;
2111         }
2112 
2113         std::lock_guard<std::recursive_mutex> lock(mLock);
2114 
2115         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = *pFd;
2116         // No extra id needed; it's already an fd.
2117 #endif
2118         return result;
2119     }
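    // Note: on Windows the "fd" returned above is not a real file descriptor but an
    // internal id (from genSemaphoreId) keyed into mExternalSemaphoresById; the
    // on_vkImportSemaphoreFdKHR handler resolves it back to the duplicated Win32 handle.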
2120 
2121     void destroySemaphoreLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2122                                 VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
2123 #ifndef _WIN32
2124         const auto& ite = mSemaphoreInfo.find(semaphore);
2125         if (ite != mSemaphoreInfo.end() &&
2126             (ite->second.externalHandle != VK_EXT_MEMORY_HANDLE_INVALID)) {
2127             close(ite->second.externalHandle);
2128         }
2129 #endif
2130         deviceDispatch->vkDestroySemaphore(device, semaphore, pAllocator);
2131 
2132         mSemaphoreInfo.erase(semaphore);
2133     }
2134 
2135     void on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
2136                                VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
2137         auto device = unbox_VkDevice(boxed_device);
2138         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2139 
2140         std::lock_guard<std::recursive_mutex> lock(mLock);
2141         destroySemaphoreLocked(device, deviceDispatch, semaphore, pAllocator);
2142     }
2143 
2144     void on_vkDestroyFence(android::base::BumpPool* pool, VkDevice boxed_device, VkFence fence,
2145                            const VkAllocationCallbacks* pAllocator) {
2146         auto device = unbox_VkDevice(boxed_device);
2147         auto vk = dispatch_VkDevice(boxed_device);
2148 
2149         {
2150             std::lock_guard<std::recursive_mutex> lock(mLock);
2151             // External fences are just slated for recycling. This addresses known
2152             // behavior where the guest might destroy the fence prematurely. b/228221208
2153             if (mFenceInfo[fence].external) {
2154                 auto* deviceInfo = android::base::find(mDeviceInfo, device);
2155                 if (deviceInfo) {
2156                     deviceInfo->externalFencePool->add(fence);
2157                     mFenceInfo[fence].boxed = VK_NULL_HANDLE;
2158                     return;
2159                 }
2160             }
2161             mFenceInfo.erase(fence);
2162         }
2163 
2164         vk->vkDestroyFence(device, fence, pAllocator);
2165     }
2166 
2167     VkResult on_vkCreateDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
2168                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
2169                                             const VkAllocationCallbacks* pAllocator,
2170                                             VkDescriptorSetLayout* pSetLayout) {
2171         auto device = unbox_VkDevice(boxed_device);
2172         auto vk = dispatch_VkDevice(boxed_device);
2173 
2174         auto res = vk->vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
2175 
2176         if (res == VK_SUCCESS) {
2177             std::lock_guard<std::recursive_mutex> lock(mLock);
2178             auto& info = mDescriptorSetLayoutInfo[*pSetLayout];
2179             info.device = device;
2180             *pSetLayout = new_boxed_non_dispatchable_VkDescriptorSetLayout(*pSetLayout);
2181             info.boxed = *pSetLayout;
2182 
2183             info.createInfo = *pCreateInfo;
2184             for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
2185                 info.bindings.push_back(pCreateInfo->pBindings[i]);
2186             }
2187         }
2188 
2189         return res;
2190     }
2191 
2192     void on_vkDestroyDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
2193                                          VkDescriptorSetLayout descriptorSetLayout,
2194                                          const VkAllocationCallbacks* pAllocator) {
2195         auto device = unbox_VkDevice(boxed_device);
2196         auto vk = dispatch_VkDevice(boxed_device);
2197 
2198         vk->vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
2199 
2200         std::lock_guard<std::recursive_mutex> lock(mLock);
2201         mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
2202     }
2203 
2204     VkResult on_vkCreateDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2205                                        const VkDescriptorPoolCreateInfo* pCreateInfo,
2206                                        const VkAllocationCallbacks* pAllocator,
2207                                        VkDescriptorPool* pDescriptorPool) {
2208         auto device = unbox_VkDevice(boxed_device);
2209         auto vk = dispatch_VkDevice(boxed_device);
2210 
2211         auto res = vk->vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
2212 
2213         if (res == VK_SUCCESS) {
2214             std::lock_guard<std::recursive_mutex> lock(mLock);
2215             auto& info = mDescriptorPoolInfo[*pDescriptorPool];
2216             info.device = device;
2217             *pDescriptorPool = new_boxed_non_dispatchable_VkDescriptorPool(*pDescriptorPool);
2218             info.boxed = *pDescriptorPool;
2219             info.createInfo = *pCreateInfo;
2220             info.maxSets = pCreateInfo->maxSets;
2221             info.usedSets = 0;
2222 
2223             for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; ++i) {
2224                 DescriptorPoolInfo::PoolState state;
2225                 state.type = pCreateInfo->pPoolSizes[i].type;
2226                 state.descriptorCount = pCreateInfo->pPoolSizes[i].descriptorCount;
2227                 state.used = 0;
2228                 info.pools.push_back(state);
2229             }
2230 
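            // With batched descriptor set updates enabled, pre-reserve one boxed
            // VkDescriptorSet handle id per set in the pool, presumably so the guest
            // can hand out set ids without a host round trip; the ids are pointed at
            // real descriptor sets when the sets are actually allocated.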
2231             if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2232                 for (uint32_t i = 0; i < pCreateInfo->maxSets; ++i) {
2233                     info.poolIds.push_back(
2234                         (uint64_t)new_boxed_non_dispatchable_VkDescriptorSet(VK_NULL_HANDLE));
2235                 }
2236             }
2237         }
2238 
2239         return res;
2240     }
2241 
2242     void cleanupDescriptorPoolAllocedSetsLocked(VkDescriptorPool descriptorPool,
2243                                                 bool isDestroy = false) {
2244         auto* info = android::base::find(mDescriptorPoolInfo, descriptorPool);
2245         if (!info) return;
2246 
2247         for (auto it : info->allocedSetsToBoxed) {
2248             auto unboxedSet = it.first;
2249             auto boxedSet = it.second;
2250             mDescriptorSetInfo.erase(unboxedSet);
2251             if (!feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2252                 delete_VkDescriptorSet(boxedSet);
2253             }
2254         }
2255 
2256         if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2257             if (isDestroy) {
2258                 for (auto poolId : info->poolIds) {
2259                     delete_VkDescriptorSet((VkDescriptorSet)poolId);
2260                 }
2261             } else {
2262                 for (auto poolId : info->poolIds) {
2263                     auto handleInfo = sBoxedHandleManager.get(poolId);
2264                     if (handleInfo)
2265                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
2266                 }
2267             }
2268         }
2269 
2270         info->usedSets = 0;
2271         info->allocedSetsToBoxed.clear();
2272 
2273         for (auto& pool : info->pools) {
2274             pool.used = 0;
2275         }
2276     }
2277 
2278     void on_vkDestroyDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2279                                     VkDescriptorPool descriptorPool,
2280                                     const VkAllocationCallbacks* pAllocator) {
2281         auto device = unbox_VkDevice(boxed_device);
2282         auto vk = dispatch_VkDevice(boxed_device);
2283 
2284         vk->vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
2285 
2286         std::lock_guard<std::recursive_mutex> lock(mLock);
2287         cleanupDescriptorPoolAllocedSetsLocked(descriptorPool, true /* destroy */);
2288         mDescriptorPoolInfo.erase(descriptorPool);
2289     }
2290 
2291     VkResult on_vkResetDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2292                                       VkDescriptorPool descriptorPool,
2293                                       VkDescriptorPoolResetFlags flags) {
2294         auto device = unbox_VkDevice(boxed_device);
2295         auto vk = dispatch_VkDevice(boxed_device);
2296 
2297         auto res = vk->vkResetDescriptorPool(device, descriptorPool, flags);
2298 
2299         if (res == VK_SUCCESS) {
2300             std::lock_guard<std::recursive_mutex> lock(mLock);
2301             cleanupDescriptorPoolAllocedSetsLocked(descriptorPool);
2302         }
2303 
2304         return res;
2305     }
2306 
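    // Records which pool and layout bindings a freshly allocated set belongs to,
    // links the unboxed handle to its boxed counterpart, and charges the
    // allocation against the pool's tracked capacity.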
2307     void initDescriptorSetInfoLocked(VkDescriptorPool pool, VkDescriptorSetLayout setLayout,
2308                                      uint64_t boxedDescriptorSet, VkDescriptorSet descriptorSet) {
2309         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
2310         if (!poolInfo) {
2311             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find poolInfo";
2312         }
2313 
2314         auto* setLayoutInfo = android::base::find(mDescriptorSetLayoutInfo, setLayout);
2315         if (!setLayoutInfo) {
2316             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find setLayout";
2317         }
2318 
2319         auto& setInfo = mDescriptorSetInfo[descriptorSet];
2320 
2321         setInfo.pool = pool;
2322         setInfo.bindings = setLayoutInfo->bindings;
2323 
2324         poolInfo->allocedSetsToBoxed[descriptorSet] = (VkDescriptorSet)boxedDescriptorSet;
2325         applyDescriptorSetAllocationLocked(*poolInfo, setInfo.bindings);
2326     }
2327 
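    // Allocations are first validated against the mirrored pool state; only then
    // is the driver called, and each returned set is boxed and registered.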
2328     VkResult on_vkAllocateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
2329                                          const VkDescriptorSetAllocateInfo* pAllocateInfo,
2330                                          VkDescriptorSet* pDescriptorSets) {
2331         auto device = unbox_VkDevice(boxed_device);
2332         auto vk = dispatch_VkDevice(boxed_device);
2333 
2334         std::lock_guard<std::recursive_mutex> lock(mLock);
2335 
2336         auto allocValidationRes = validateDescriptorSetAllocLocked(pAllocateInfo);
2337         if (allocValidationRes != VK_SUCCESS) return allocValidationRes;
2338 
2339         auto res = vk->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
2340 
2341         if (res == VK_SUCCESS) {
2342             auto* poolInfo =
2343                 android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
2344             if (!poolInfo) return res;
2345 
2346             for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
2347                 auto unboxed = pDescriptorSets[i];
2348                 pDescriptorSets[i] = new_boxed_non_dispatchable_VkDescriptorSet(pDescriptorSets[i]);
2349                 initDescriptorSetInfoLocked(pAllocateInfo->descriptorPool,
2350                                             pAllocateInfo->pSetLayouts[i],
2351                                             (uint64_t)(pDescriptorSets[i]), unboxed);
2352             }
2353         }
2354 
2355         return res;
2356     }
2357 
2358     VkResult on_vkFreeDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
2359                                      VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
2360                                      const VkDescriptorSet* pDescriptorSets) {
2361         auto device = unbox_VkDevice(boxed_device);
2362         auto vk = dispatch_VkDevice(boxed_device);
2363 
2364         auto res =
2365             vk->vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
2366 
2367         if (res == VK_SUCCESS) {
2368             std::lock_guard<std::recursive_mutex> lock(mLock);
2369 
2370             for (uint32_t i = 0; i < descriptorSetCount; ++i) {
2371                 auto* setInfo = android::base::find(mDescriptorSetInfo, pDescriptorSets[i]);
2372                 if (!setInfo) continue;
2373                 auto* poolInfo = android::base::find(mDescriptorPoolInfo, setInfo->pool);
2374                 if (!poolInfo) continue;
2375 
2376                 removeDescriptorSetAllocationLocked(*poolInfo, setInfo->bindings);
2377 
2378                 auto descSetAllocedEntry =
2379                     android::base::find(poolInfo->allocedSetsToBoxed, pDescriptorSets[i]);
2380                 if (!descSetAllocedEntry) continue;
2381 
2382                 auto handleInfo = sBoxedHandleManager.get((uint64_t)*descSetAllocedEntry);
2383                 if (handleInfo) {
2384                     if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
2385                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
2386                     } else {
2387                         delete_VkDescriptorSet(*descSetAllocedEntry);
2388                     }
2389                 }
2390 
2391                 poolInfo->allocedSetsToBoxed.erase(pDescriptorSets[i]);
2392 
2393                 mDescriptorSetInfo.erase(pDescriptorSets[i]);
2394             }
2395         }
2396 
2397         return res;
2398     }
2399 
2400     void on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
2401                                    uint32_t descriptorWriteCount,
2402                                    const VkWriteDescriptorSet* pDescriptorWrites,
2403                                    uint32_t descriptorCopyCount,
2404                                    const VkCopyDescriptorSet* pDescriptorCopies) {
2405         auto device = unbox_VkDevice(boxed_device);
2406         auto vk = dispatch_VkDevice(boxed_device);
2407 
2408         std::lock_guard<std::recursive_mutex> lock(mLock);
2409         on_vkUpdateDescriptorSetsImpl(pool, vk, device, descriptorWriteCount, pDescriptorWrites,
2410                                       descriptorCopyCount, pDescriptorCopies);
2411     }
2412 
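    // Rewrites descriptor writes that reference combined image samplers needing
    // emulated border alpha: the affected writes are deep-copied and their
    // samplers replaced with a lazily created sampler whose transparent-black
    // border color is swapped for an opaque equivalent.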
2413     void on_vkUpdateDescriptorSetsImpl(android::base::BumpPool* pool, VulkanDispatch* vk,
2414                                        VkDevice device, uint32_t descriptorWriteCount,
2415                                        const VkWriteDescriptorSet* pDescriptorWrites,
2416                                        uint32_t descriptorCopyCount,
2417                                        const VkCopyDescriptorSet* pDescriptorCopies) {
2418         bool needEmulateWriteDescriptor = false;
2419         // C++ permits zero-size array allocation, so this is safe even when descriptorWriteCount == 0.
2420         std::unique_ptr<bool[]> descriptorWritesNeedDeepCopy(new bool[descriptorWriteCount]);
2421         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
2422             const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[i];
2423             descriptorWritesNeedDeepCopy[i] = false;
2424             if (descriptorWrite.descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
2425                 continue;
2426             }
2427             for (uint32_t j = 0; j < descriptorWrite.descriptorCount; j++) {
2428                 const VkDescriptorImageInfo& imageInfo = descriptorWrite.pImageInfo[j];
2429                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
2430                 const auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
2431                 if (!imgViewInfo || !samplerInfo) continue;
2432                 if (imgViewInfo->needEmulatedAlpha && samplerInfo->needEmulatedAlpha) {
2433                     needEmulateWriteDescriptor = true;
2434                     descriptorWritesNeedDeepCopy[i] = true;
2435                     break;
2436                 }
2437             }
2438         }
2439         if (!needEmulateWriteDescriptor) {
2440             vk->vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
2441                                        descriptorCopyCount, pDescriptorCopies);
2442             return;
2443         }
2444         std::list<std::unique_ptr<VkDescriptorImageInfo[]>> imageInfoPool;
2445         std::unique_ptr<VkWriteDescriptorSet[]> descriptorWrites(
2446             new VkWriteDescriptorSet[descriptorWriteCount]);
2447         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
2448             const VkWriteDescriptorSet& srcDescriptorWrite = pDescriptorWrites[i];
2449             VkWriteDescriptorSet& dstDescriptorWrite = descriptorWrites[i];
2450             // Shallow copy first
2451             dstDescriptorWrite = srcDescriptorWrite;
2452             if (!descriptorWritesNeedDeepCopy[i]) {
2453                 continue;
2454             }
2455             // Deep copy
2456             assert(dstDescriptorWrite.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
2457             imageInfoPool.emplace_back(
2458                 new VkDescriptorImageInfo[dstDescriptorWrite.descriptorCount]);
2459             VkDescriptorImageInfo* imageInfos = imageInfoPool.back().get();
2460             memcpy(imageInfos, srcDescriptorWrite.pImageInfo,
2461                    dstDescriptorWrite.descriptorCount * sizeof(VkDescriptorImageInfo));
2462             dstDescriptorWrite.pImageInfo = imageInfos;
2463             for (uint32_t j = 0; j < dstDescriptorWrite.descriptorCount; j++) {
2464                 VkDescriptorImageInfo& imageInfo = imageInfos[j];
2465                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
2466                 auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
2467                 if (!imgViewInfo || !samplerInfo) continue;
2468                 if (imgViewInfo->needEmulatedAlpha && samplerInfo->needEmulatedAlpha) {
2469                     if (samplerInfo->emulatedborderSampler == VK_NULL_HANDLE) {
2470                         // create the emulated sampler
2471                         VkSamplerCreateInfo createInfo;
2472                         deepcopy_VkSamplerCreateInfo(pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
2473                                                      &samplerInfo->createInfo, &createInfo);
2474                         switch (createInfo.borderColor) {
2475                             case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
2476                                 createInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
2477                                 break;
2478                             case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
2479                                 createInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2480                                 break;
2481                             case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2482                             case VK_BORDER_COLOR_INT_CUSTOM_EXT: {
2483                                 VkSamplerCustomBorderColorCreateInfoEXT*
2484                                     customBorderColorCreateInfo =
2485                                         vk_find_struct<VkSamplerCustomBorderColorCreateInfoEXT>(
2486                                             &createInfo);
2487                                 if (customBorderColorCreateInfo) {
2488                                     switch (createInfo.borderColor) {
2489                                         case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2490                                             customBorderColorCreateInfo->customBorderColor
2491                                                 .float32[3] = 1.0f;
2492                                             break;
2493                                         case VK_BORDER_COLOR_INT_CUSTOM_EXT:
2494                                             customBorderColorCreateInfo->customBorderColor
2495                                                 .int32[3] = 128;
2496                                             break;
2497                                         default:
2498                                             break;
2499                                     }
2500                                 }
2501                                 break;
2502                             }
2503                             default:
2504                                 break;
2505                         }
2506                         vk->vkCreateSampler(device, &createInfo, nullptr,
2507                                             &samplerInfo->emulatedborderSampler);
2508                     }
2509                     imageInfo.sampler = samplerInfo->emulatedborderSampler;
2510                 }
2511             }
2512         }
2513         vk->vkUpdateDescriptorSets(device, descriptorWriteCount, descriptorWrites.get(),
2514                                    descriptorCopyCount, pDescriptorCopies);
2515     }
2516 
2518     VkResult on_vkCreateShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
2519                                      const VkShaderModuleCreateInfo* pCreateInfo,
2520                                      const VkAllocationCallbacks* pAllocator,
2521                                      VkShaderModule* pShaderModule) {
2522         auto device = unbox_VkDevice(boxed_device);
2523         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2524 
2525         VkResult result =
2526             deviceDispatch->vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
2527         if (result != VK_SUCCESS) {
2528             return result;
2529         }
2530 
2531         std::lock_guard<std::recursive_mutex> lock(mLock);
2532 
2533         auto& shaderModuleInfo = mShaderModuleInfo[*pShaderModule];
2534         shaderModuleInfo.device = device;
2535 
2536         *pShaderModule = new_boxed_non_dispatchable_VkShaderModule(*pShaderModule);
2537 
2538         return result;
2539     }
2540 
2541     void destroyShaderModuleLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2542                                    VkShaderModule shaderModule,
2543                                    const VkAllocationCallbacks* pAllocator) {
2544         deviceDispatch->vkDestroyShaderModule(device, shaderModule, pAllocator);
2545 
2546         mShaderModuleInfo.erase(shaderModule);
2547     }
2548 
2549     void on_vkDestroyShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
2550                                   VkShaderModule shaderModule,
2551                                   const VkAllocationCallbacks* pAllocator) {
2552         auto device = unbox_VkDevice(boxed_device);
2553         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2554 
2555         std::lock_guard<std::recursive_mutex> lock(mLock);
2556         destroyShaderModuleLocked(device, deviceDispatch, shaderModule, pAllocator);
2557     }
2558 
2559     VkResult on_vkCreatePipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
2560                                       const VkPipelineCacheCreateInfo* pCreateInfo,
2561                                       const VkAllocationCallbacks* pAllocator,
2562                                       VkPipelineCache* pPipelineCache) {
2563         auto device = unbox_VkDevice(boxed_device);
2564         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2565 
2566         VkResult result =
2567             deviceDispatch->vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
2568         if (result != VK_SUCCESS) {
2569             return result;
2570         }
2571 
2572         std::lock_guard<std::recursive_mutex> lock(mLock);
2573 
2574         auto& pipelineCacheInfo = mPipelineCacheInfo[*pPipelineCache];
2575         pipelineCacheInfo.device = device;
2576 
2577         *pPipelineCache = new_boxed_non_dispatchable_VkPipelineCache(*pPipelineCache);
2578 
2579         return result;
2580     }
2581 
2582     void destroyPipelineCacheLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2583                                     VkPipelineCache pipelineCache,
2584                                     const VkAllocationCallbacks* pAllocator) {
2585         deviceDispatch->vkDestroyPipelineCache(device, pipelineCache, pAllocator);
2586 
2587         mPipelineCacheInfo.erase(pipelineCache);
2588     }
2589 
2590     void on_vkDestroyPipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
2591                                    VkPipelineCache pipelineCache,
2592                                    const VkAllocationCallbacks* pAllocator) {
2593         auto device = unbox_VkDevice(boxed_device);
2594         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2595 
2596         std::lock_guard<std::recursive_mutex> lock(mLock);
2597         destroyPipelineCacheLocked(device, deviceDispatch, pipelineCache, pAllocator);
2598     }
2599 
2600     VkResult on_vkCreateGraphicsPipelines(android::base::BumpPool* pool, VkDevice boxed_device,
2601                                           VkPipelineCache pipelineCache, uint32_t createInfoCount,
2602                                           const VkGraphicsPipelineCreateInfo* pCreateInfos,
2603                                           const VkAllocationCallbacks* pAllocator,
2604                                           VkPipeline* pPipelines) {
2605         auto device = unbox_VkDevice(boxed_device);
2606         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2607 
2608         VkResult result = deviceDispatch->vkCreateGraphicsPipelines(
2609             device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
2610         if (result != VK_SUCCESS) {
2611             return result;
2612         }
2613 
2614         std::lock_guard<std::recursive_mutex> lock(mLock);
2615 
2616         for (uint32_t i = 0; i < createInfoCount; i++) {
2617             auto& pipelineInfo = mPipelineInfo[pPipelines[i]];
2618             pipelineInfo.device = device;
2619 
2620             pPipelines[i] = new_boxed_non_dispatchable_VkPipeline(pPipelines[i]);
2621         }
2622 
2623         return result;
2624     }
2625 
2626     void destroyPipelineLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkPipeline pipeline,
2627                                const VkAllocationCallbacks* pAllocator) {
2628         deviceDispatch->vkDestroyPipeline(device, pipeline, pAllocator);
2629 
2630         mPipelineInfo.erase(pipeline);
2631     }
2632 
2633     void on_vkDestroyPipeline(android::base::BumpPool* pool, VkDevice boxed_device,
2634                               VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {
2635         auto device = unbox_VkDevice(boxed_device);
2636         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2637 
2638         std::lock_guard<std::recursive_mutex> lock(mLock);
2639         destroyPipelineLocked(device, deviceDispatch, pipeline, pAllocator);
2640     }
2641 
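    // For images backed by emulated compressed formats, copies are redirected to
    // the per-mip-level compressed mipmap images with the regions rewritten for
    // the compressed block dimensions; otherwise the call passes straight through.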
2642     void on_vkCmdCopyImage(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
2643                            VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
2644                            VkImageLayout dstImageLayout, uint32_t regionCount,
2645                            const VkImageCopy* pRegions) {
2646         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2647         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2648 
2649         std::lock_guard<std::recursive_mutex> lock(mLock);
2650         auto* srcImg = android::base::find(mImageInfo, srcImage);
2651         auto* dstImg = android::base::find(mImageInfo, dstImage);
2652         if (!srcImg || !dstImg) return;
2653 
2654         VkDevice device = srcImg->cmpInfo.device();
2655         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2656         if (!deviceInfo) return;
2657 
2658         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
2659         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
2660         if (!needEmulatedSrc && !needEmulatedDst) {
2661             vk->vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
2662                                regionCount, pRegions);
2663             return;
2664         }
2665         VkImage srcImageMip = srcImage;
2666         VkImage dstImageMip = dstImage;
2667         for (uint32_t r = 0; r < regionCount; r++) {
2668             if (needEmulatedSrc) {
2669                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pRegions[r].srcSubresource.mipLevel);
2670             }
2671             if (needEmulatedDst) {
2672                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pRegions[r].dstSubresource.mipLevel);
2673             }
2674             VkImageCopy region = CompressedImageInfo::getCompressedMipmapsImageCopy(
2675                 pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
2676             vk->vkCmdCopyImage(commandBuffer, srcImageMip, srcImageLayout, dstImageMip,
2677                                dstImageLayout, 1, &region);
2678         }
2679     }
2680 
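    // Same redirection as on_vkCmdCopyImage, but for image-to-buffer copies out
    // of an emulated compressed image.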
2681     void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
2682                                    VkCommandBuffer boxed_commandBuffer, VkImage srcImage,
2683                                    VkImageLayout srcImageLayout, VkBuffer dstBuffer,
2684                                    uint32_t regionCount, const VkBufferImageCopy* pRegions) {
2685         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2686         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2687 
2688         std::lock_guard<std::recursive_mutex> lock(mLock);
2689         auto* imageInfo = android::base::find(mImageInfo, srcImage);
2690         auto* bufferInfo = android::base::find(mBufferInfo, dstBuffer);
2691         if (!imageInfo || !bufferInfo) return;
2692         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
2693         if (!deviceInfo) return;
2694         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
2695         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
2696             vk->vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer,
2697                                        regionCount, pRegions);
2698             return;
2699         }
2700         for (uint32_t r = 0; r < regionCount; r++) {
2701             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
2702             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
2703             vk->vkCmdCopyImageToBuffer(commandBuffer, cmpInfo.compressedMipmap(mipLevel),
2704                                        srcImageLayout, dstBuffer, 1, &region);
2705         }
2706     }
2707 
2708     void on_vkGetImageMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
2709                                          VkImage image, VkMemoryRequirements* pMemoryRequirements) {
2710         auto device = unbox_VkDevice(boxed_device);
2711         auto vk = dispatch_VkDevice(boxed_device);
2712         vk->vkGetImageMemoryRequirements(device, image, pMemoryRequirements);
2713         std::lock_guard<std::recursive_mutex> lock(mLock);
2714         updateImageMemorySizeLocked(device, image, pMemoryRequirements);
2715     }
2716 
2717     void on_vkGetImageMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
2718                                           const VkImageMemoryRequirementsInfo2* pInfo,
2719                                           VkMemoryRequirements2* pMemoryRequirements) {
2720         auto device = unbox_VkDevice(boxed_device);
2721         auto vk = dispatch_VkDevice(boxed_device);
2722         std::lock_guard<std::recursive_mutex> lock(mLock);
2723 
2724         auto physicalDevice = mDeviceToPhysicalDevice[device];
2725         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
2726         if (!physdevInfo) {
2727             // The memory properties map is expected to have an entry for this physical
2728             // device; abort cleanly rather than dereference a null pointer below.
2729             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Could not get memory properties for VkPhysicalDevice";
2730         }
2731 
2732         if ((physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
2733             vk->vkGetImageMemoryRequirements2) {
2734             vk->vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
2735         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
2736             vk->vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
2737         } else {
2738             if (pInfo->pNext) {
2739                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
2740                     "having enabled the extension!");
2741             }
2742 
2743             vk->vkGetImageMemoryRequirements(device, pInfo->image,
2744                                              &pMemoryRequirements->memoryRequirements);
2745         }
2746         updateImageMemorySizeLocked(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
2747     }
2748 
2749     void on_vkGetBufferMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
2750                                           VkBuffer buffer,
2751                                           VkMemoryRequirements* pMemoryRequirements) {
2752         auto device = unbox_VkDevice(boxed_device);
2753         auto vk = dispatch_VkDevice(boxed_device);
2754         vk->vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
2755     }
2756 
2757     void on_vkGetBufferMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
2758                                            const VkBufferMemoryRequirementsInfo2* pInfo,
2759                                            VkMemoryRequirements2* pMemoryRequirements) {
2760         auto device = unbox_VkDevice(boxed_device);
2761         auto vk = dispatch_VkDevice(boxed_device);
2762 
2763         std::lock_guard<std::recursive_mutex> lock(mLock);
2764 
2765         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
2766         if (!physicalDevice) {
2767             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
2768                 << "No physical device available for " << device;
2769         }
2770 
2771         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
2772         if (!physicalDeviceInfo) {
2773             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
2774                 << "No physical device info available for " << *physicalDevice;
2775         }
2776 
2777         if ((physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
2778             vk->vkGetBufferMemoryRequirements2) {
2779             vk->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
2780         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
2781             vk->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
2782         } else {
2783             if (pInfo->pNext) {
2784                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
2785                     "having enabled the extension!");
2786             }
2787 
2788             vk->vkGetBufferMemoryRequirements(device, pInfo->buffer,
2789                                               &pMemoryRequirements->memoryRequirements);
2790         }
2791     }
2792 
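    // Buffer-to-image copies into emulated compressed images are redirected to
    // the compressed mipmap images (unless ASTC emulation is CPU-only), and the
    // source data is additionally decompressed on the CPU when possible, using
    // the host mapping of the staging buffer's memory.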
2793     void on_vkCmdCopyBufferToImage(android::base::BumpPool* pool,
2794                                    VkCommandBuffer boxed_commandBuffer, VkBuffer srcBuffer,
2795                                    VkImage dstImage, VkImageLayout dstImageLayout,
2796                                    uint32_t regionCount, const VkBufferImageCopy* pRegions,
2797                                    const VkDecoderContext& context) {
2798         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2799         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2800 
2801         std::lock_guard<std::recursive_mutex> lock(mLock);
2802         auto* imageInfo = android::base::find(mImageInfo, dstImage);
2803         if (!imageInfo) return;
2804         auto* bufferInfo = android::base::find(mBufferInfo, srcBuffer);
2805         if (!bufferInfo) {
2806             return;
2807         }
2808         VkDevice device = bufferInfo->device;
2809         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2810         if (!deviceInfo) {
2811             return;
2812         }
2813         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
2814             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
2815                                        regionCount, pRegions);
2816             return;
2817         }
2818         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
2819         if (!cmdBufferInfo) {
2820             return;
2821         }
2822         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
2823         if (m_emu->astcLdrEmulationMode != AstcEmulationMode::CpuOnly) {
2824             for (uint32_t r = 0; r < regionCount; r++) {
2825                 uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
2826                 VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
2827                 vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer,
2828                                            cmpInfo.compressedMipmap(mipLevel), dstImageLayout, 1,
2829                                            &region);
2830             }
2831         }
2832 
2833         // Perform CPU decompression of ASTC textures, if enabled
2834         if (cmpInfo.canDecompressOnCpu()) {
2835             // Get a pointer to the compressed image memory
2836             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
2837             if (!memoryInfo) {
2838                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
2839                 return;
2840             }
2841             if (!memoryInfo->ptr) {
2842                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
2843                 return;
2844             }
2845             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
2846             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, dstImage,
2847                                     dstImageLayout, regionCount, pRegions, context);
2848         }
2849     }
2850 
2851     inline void convertQueueFamilyForeignToExternal(uint32_t* queueFamilyIndexPtr) {
2852         if (*queueFamilyIndexPtr == VK_QUEUE_FAMILY_FOREIGN_EXT) {
2853             *queueFamilyIndexPtr = VK_QUEUE_FAMILY_EXTERNAL;
2854         }
2855     }
2856 
2857     inline void convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
2858         VkBufferMemoryBarrier* barrier) {
2859         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
2860         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
2861     }
2862 
2863     inline void convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
2864         VkImageMemoryBarrier* barrier) {
2865         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
2866         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
2867     }
2868 
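    // Rewrites foreign queue family indices to external ones and, for devices
    // emulating compressed textures, triggers GPU decompression of images being
    // transitioned to a readable layout, restoring any compute pipeline bindings
    // the decompression may have clobbered before forwarding the remaining barriers.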
2869     void on_vkCmdPipelineBarrier(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
2870                                  VkPipelineStageFlags srcStageMask,
2871                                  VkPipelineStageFlags dstStageMask,
2872                                  VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
2873                                  const VkMemoryBarrier* pMemoryBarriers,
2874                                  uint32_t bufferMemoryBarrierCount,
2875                                  const VkBufferMemoryBarrier* pBufferMemoryBarriers,
2876                                  uint32_t imageMemoryBarrierCount,
2877                                  const VkImageMemoryBarrier* pImageMemoryBarriers) {
2878         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
2879         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
2880 
2881         for (uint32_t i = 0; i < bufferMemoryBarrierCount; ++i) {
2882             convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
2883                 ((VkBufferMemoryBarrier*)pBufferMemoryBarriers) + i);
2884         }
2885 
2886         for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
2887             convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
2888                 ((VkImageMemoryBarrier*)pImageMemoryBarriers) + i);
2889         }
2890 
2891         if (imageMemoryBarrierCount == 0) {
2892             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
2893                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
2894                                      pBufferMemoryBarriers, imageMemoryBarrierCount,
2895                                      pImageMemoryBarriers);
2896             return;
2897         }
2898         std::lock_guard<std::recursive_mutex> lock(mLock);
2899         CommandBufferInfo* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
2900         if (!cmdBufferInfo) return;
2901 
2902         DeviceInfo* deviceInfo = android::base::find(mDeviceInfo, cmdBufferInfo->device);
2903         if (!deviceInfo) return;
2904 
2905         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
2906             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
2907                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
2908                                      pBufferMemoryBarriers, imageMemoryBarrierCount,
2909                                      pImageMemoryBarriers);
2910             return;
2911         }
2912 
2913         // This is a compressed image. Handle decompression before calling vkCmdPipelineBarrier
2914 
2915         std::vector<VkImageMemoryBarrier> imageBarriers;
2916         bool needRebind = false;
2917 
2918         for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
2919             const VkImageMemoryBarrier& srcBarrier = pImageMemoryBarriers[i];
2920             auto* imageInfo = android::base::find(mImageInfo, srcBarrier.image);
2921 
2922             // If the image was already decompressed on the CPU, or if we disabled GPU
2923             // decompression, nothing to do
2924             if (!imageInfo || !deviceInfo->needGpuDecompression(imageInfo->cmpInfo) ||
2925                 m_emu->astcLdrEmulationMode == AstcEmulationMode::CpuOnly) {
2926                 imageBarriers.push_back(srcBarrier);
2927                 continue;
2928             }
2929             if (srcBarrier.newLayout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
2930                 srcBarrier.newLayout != VK_IMAGE_LAYOUT_GENERAL) {
2931                 fprintf(stderr,
2932                         "WARNING: unexpected layout transition for compressed image: "
2933                         "from %d to %d\n",
2934                         srcBarrier.oldLayout, srcBarrier.newLayout);
2935             }
2936 
2937             // Otherwise, decompress the image, if we're going to read from it.
2938             needRebind |= imageInfo->cmpInfo.decompressIfNeeded(
2939                 vk, commandBuffer, srcStageMask, dstStageMask, srcBarrier, imageBarriers);
2940         }
2941 
2942         if (needRebind && cmdBufferInfo->computePipeline) {
2943             // Recover pipeline bindings
2944             // TODO(gregschlom): instead of doing this here again and again after each image we
2945             // decompress, could we do it once before calling vkCmdDispatch?
2946             vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
2947                                   cmdBufferInfo->computePipeline);
2948             if (!cmdBufferInfo->descriptorSets.empty()) {
2949                 vk->vkCmdBindDescriptorSets(
2950                     commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, cmdBufferInfo->descriptorLayout,
2951                     cmdBufferInfo->firstSet, cmdBufferInfo->descriptorSets.size(),
2952                     cmdBufferInfo->descriptorSets.data(), cmdBufferInfo->dynamicOffsets.size(),
2953                     cmdBufferInfo->dynamicOffsets.data());
2954             }
2955         }
2956 
2957         // Apply the remaining barriers
2958         if (memoryBarrierCount || bufferMemoryBarrierCount || !imageBarriers.empty()) {
2959             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
2960                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
2961                                      pBufferMemoryBarriers, imageBarriers.size(),
2962                                      imageBarriers.data());
2963         }
2964     }
2965 
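    // Maps a host-visible allocation into the guest physical address space: the
    // host pointer and size are page-aligned, registered as user-backed RAM at
    // the given GPA (replacing any previous occupant), and a deallocation
    // callback is installed so the mapping is torn down with the address space
    // block.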
2966     bool mapHostVisibleMemoryToGuestPhysicalAddressLocked(VulkanDispatch* vk, VkDevice device,
2967                                                           VkDeviceMemory memory,
2968                                                           uint64_t physAddr) {
2969         if (!feature_is_enabled(kFeature_GLDirectMem) &&
2970             !feature_is_enabled(kFeature_VirtioGpuNext)) {
2971             // fprintf(stderr, "%s: Tried to use direct mapping "
2972             // "while GLDirectMem is not enabled!\n");
2973         }
2974 
2975         auto* info = android::base::find(mMemoryInfo, memory);
2976         if (!info) return false;
2977 
2978         info->guestPhysAddr = physAddr;
2979 
2980         constexpr size_t kPageBits = 12;
2981         constexpr size_t kPageSize = 1u << kPageBits;
2982         constexpr size_t kPageOffsetMask = kPageSize - 1;
2983 
2984         uintptr_t addr = reinterpret_cast<uintptr_t>(info->ptr);
2985         uintptr_t pageOffset = addr & kPageOffsetMask;
2986 
2987         info->pageAlignedHva = reinterpret_cast<void*>(addr - pageOffset);
2988         info->sizeToPage = ((info->size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;
2989 
2990         if (mLogging) {
2991             fprintf(stderr, "%s: map: %p, %p -> [0x%llx 0x%llx]\n", __func__, info->ptr,
2992                     info->pageAlignedHva, (unsigned long long)info->guestPhysAddr,
2993                     (unsigned long long)info->guestPhysAddr + info->sizeToPage);
2994         }
2995 
2996         info->directMapped = true;
2997         uint64_t gpa = info->guestPhysAddr;
2998         void* hva = info->pageAlignedHva;
2999         size_t sizeToPage = info->sizeToPage;
3000 
3001         AutoLock occupiedGpasLock(mOccupiedGpasLock);
3002 
3003         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
3004         if (existingMemoryInfo) {
3005             fprintf(stderr, "%s: WARNING: already mapped gpa 0x%llx, replacing\n", __func__,
3006                     (unsigned long long)gpa);
3007 
3008             get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
3009                                                          existingMemoryInfo->sizeToPage);
3010 
3011             mOccupiedGpas.erase(gpa);
3012         }
3013 
3014         get_emugl_vm_operations().mapUserBackedRam(gpa, hva, sizeToPage);
3015 
3016         if (mVerbosePrints) {
3017             fprintf(stderr, "VERBOSE:%s: registering gpa 0x%llx to mOccupiedGpas\n", __func__,
3018                     (unsigned long long)gpa);
3019         }
3020 
3021         mOccupiedGpas[gpa] = {
3022             vk, device, memory, gpa, sizeToPage,
3023         };
3024 
3025         if (!mUseOldMemoryCleanupPath) {
3026             get_emugl_address_space_device_control_ops().register_deallocation_callback(
3027                 this, gpa, [](void* thisPtr, uint64_t gpa) {
3028                     Impl* implPtr = (Impl*)thisPtr;
3029                     implPtr->unmapMemoryAtGpaIfExists(gpa);
3030                 });
3031         }
3032 
3033         return true;
3034     }
3035 
3036     // Only call this from the address space device deallocation operation's
3037     // context; otherwise the guest and host views of which GPAs are occupied
3038     // can go out of sync.
3039     void unmapMemoryAtGpaIfExists(uint64_t gpa) {
3040         AutoLock lock(mOccupiedGpasLock);
3041 
3042         if (mVerbosePrints) {
3043             fprintf(stderr, "VERBOSE:%s: deallocation callback for gpa 0x%llx\n", __func__,
3044                     (unsigned long long)gpa);
3045         }
3046 
3047         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
3048         if (!existingMemoryInfo) return;
3049 
3050         get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
3051                                                      existingMemoryInfo->sizeToPage);
3052 
3053         mOccupiedGpas.erase(gpa);
3054     }
3055 
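    // Rebuilds the allocation's struct chain host-side: gfxstream import structs
    // (ColorBuffer, Buffer, blob) are translated into external-memory or
    // host-pointer imports, dedicated-allocation info is kept only when the
    // backing resource actually used one, and host-visible system blobs are
    // backed by page-aligned shared memory.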
3056     VkResult on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice boxed_device,
3057                                  const VkMemoryAllocateInfo* pAllocateInfo,
3058                                  const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {
3059         auto device = unbox_VkDevice(boxed_device);
3060         auto vk = dispatch_VkDevice(boxed_device);
3061 
3062         if (!pAllocateInfo) return VK_ERROR_INITIALIZATION_FAILED;
3063 
3064         VkMemoryAllocateInfo localAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
3065         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localAllocInfo);
3066 
3067         const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
3068             vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
3069         VkMemoryDedicatedAllocateInfo localDedicatedAllocInfo;
3070 
3071         if (dedicatedAllocInfoPtr) {
3072             localDedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
3073         }
3074         // Note for AHardwareBuffers, the Vulkan spec states:
3075         //
3076         //     Android hardware buffers have intrinsic width, height, format, and usage
3077         //     properties, so Vulkan images bound to memory imported from an Android
3078         //     hardware buffer must use dedicated allocations
3079         //
3080         // so any allocation requests with a VkImportAndroidHardwareBufferInfoANDROID
3081         // will necessarily have a VkMemoryDedicatedAllocateInfo. However, the host
3082         // may or may not actually use a dedicated allocation during Buffer/ColorBuffer
3083         // setup. Below checks if the underlying Buffer/ColorBuffer backing memory was
3084         // originally created with a dedicated allocation.
3085         bool shouldUseDedicatedAllocInfo = dedicatedAllocInfoPtr != nullptr;
3086 
3087         const VkImportColorBufferGOOGLE* importCbInfoPtr =
3088             vk_find_struct<VkImportColorBufferGOOGLE>(pAllocateInfo);
3089         const VkImportBufferGOOGLE* importBufferInfoPtr =
3090             vk_find_struct<VkImportBufferGOOGLE>(pAllocateInfo);
3091 
3092         const VkCreateBlobGOOGLE* createBlobInfoPtr =
3093             vk_find_struct<VkCreateBlobGOOGLE>(pAllocateInfo);
3094 
3095 #ifdef _WIN32
3096         VkImportMemoryWin32HandleInfoKHR importInfo{
3097             VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
3098             0,
3099             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
3100             VK_EXT_MEMORY_HANDLE_INVALID,
3101             L"",
3102         };
3103 #else
3104         VkImportMemoryFdInfoKHR importInfo{
3105             VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
3106             0,
3107             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
3108             VK_EXT_MEMORY_HANDLE_INVALID,
3109         };
3110 #endif
3111 
3112         void* mappedPtr = nullptr;
3113         ManagedDescriptor externalMemoryHandle;
3114         if (importCbInfoPtr) {
3115             bool vulkanOnly = mGuestUsesAngle;
3116 
3117             bool colorBufferMemoryUsesDedicatedAlloc = false;
3118             if (!getColorBufferAllocationInfo(importCbInfoPtr->colorBuffer,
3119                                               &localAllocInfo.allocationSize,
3120                                               &localAllocInfo.memoryTypeIndex,
3121                                               &colorBufferMemoryUsesDedicatedAlloc, &mappedPtr)) {
3122                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3123                     << "Failed to get allocation info for ColorBuffer:"
3124                     << importCbInfoPtr->colorBuffer;
3125             }
3126 
3127             shouldUseDedicatedAllocInfo &= colorBufferMemoryUsesDedicatedAlloc;
3128 
3129             if (!vulkanOnly) {
3130                 auto fb = FrameBuffer::getFB();
3131                 if (fb) {
3132                     fb->invalidateColorBufferForVk(importCbInfoPtr->colorBuffer);
3133                 }
3134             }
3135 
3136             if (m_emu->instanceSupportsExternalMemoryCapabilities) {
3137                 VK_EXT_MEMORY_HANDLE cbExtMemoryHandle =
3138                     getColorBufferExtMemoryHandle(importCbInfoPtr->colorBuffer);
3139 
3140                 if (cbExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
3141                     fprintf(stderr,
3142                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
3143                             "colorBuffer 0x%x does not have Vulkan external memory backing\n",
3144                             __func__, importCbInfoPtr->colorBuffer);
3145                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3146                 }
3147 
3148                 externalMemoryHandle = ManagedDescriptor(dupExternalMemory(cbExtMemoryHandle));
3149 
3150 #ifdef _WIN32
3151                 importInfo.handle = externalMemoryHandle.get().value_or(static_cast<HANDLE>(NULL));
3152 #else
3153                 importInfo.fd = externalMemoryHandle.get().value_or(-1);
3154 #endif
3155                 vk_append_struct(&structChainIter, &importInfo);
3156             }
3157         }
3158 
3159         if (importBufferInfoPtr) {
3160             bool bufferMemoryUsesDedicatedAlloc = false;
3161             if (!getBufferAllocationInfo(
3162                     importBufferInfoPtr->buffer, &localAllocInfo.allocationSize,
3163                     &localAllocInfo.memoryTypeIndex, &bufferMemoryUsesDedicatedAlloc)) {
3164                 ERR("Failed to get Buffer:%d allocation info.", importBufferInfoPtr->buffer);
3165                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3166             }
3167 
3168             shouldUseDedicatedAllocInfo &= bufferMemoryUsesDedicatedAlloc;
3169 
3170             if (m_emu->instanceSupportsExternalMemoryCapabilities) {
3171                 VK_EXT_MEMORY_HANDLE bufferExtMemoryHandle =
3172                     getBufferExtMemoryHandle(importBufferInfoPtr->buffer);
3173 
3174                 if (bufferExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
3175                     fprintf(stderr,
3176                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
3177                             "buffer 0x%x does not have Vulkan external memory "
3178                             "backing\n",
3179                             __func__, importBufferInfoPtr->buffer);
3180                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3181                 }
3182 
3183                 bufferExtMemoryHandle = dupExternalMemory(bufferExtMemoryHandle);
3184 
3185 #ifdef _WIN32
3186                 importInfo.handle = bufferExtMemoryHandle;
3187 #else
3188                 importInfo.fd = bufferExtMemoryHandle;
3189 #endif
3190                 vk_append_struct(&structChainIter, &importInfo);
3191             }
3192         }
3193 
3194         VkMemoryPropertyFlags memoryPropertyFlags;
3195         {
3196             std::lock_guard<std::recursive_mutex> lock(mLock);
3197 
3198             auto* physdev = android::base::find(mDeviceToPhysicalDevice, device);
3199             if (!physdev) {
3200                 // User app gave an invalid VkDevice, but we don't really want to crash here.
3201                 // We should allow invalid apps.
3202                 return VK_ERROR_DEVICE_LOST;
3203             }
3204 
3205             auto* physdevInfo = android::base::find(mPhysdevInfo, *physdev);
3206             if (!physdevInfo) {
3207                 // The memory properties map is expected to have this physical device's
3208                 // entry; abort cleanly rather than dereference a null pointer below.
3209                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Could not get memory properties for VkPhysicalDevice";
3210             }
3211 
3212             // If the memory was allocated with a type index that corresponds
3213             // to a memory type that is host visible, let's also map the entire
3214             // thing.
3215 
3216             // First, check validity of the user's type index.
3217             if (localAllocInfo.memoryTypeIndex >= physdevInfo->memoryProperties.memoryTypeCount) {
3218                 // Continue allowing invalid behavior.
3219                 return VK_ERROR_INCOMPATIBLE_DRIVER;
3220             }
3221             memoryPropertyFlags =
3222                 physdevInfo->memoryProperties.memoryTypes[localAllocInfo.memoryTypeIndex]
3223                     .propertyFlags;
3224         }
3225 
3226         if (shouldUseDedicatedAllocInfo) {
3227             vk_append_struct(&structChainIter, &localDedicatedAllocInfo);
3228         }
3229 
3230         VkExportMemoryAllocateInfo exportAllocate = {
3231             .sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
3232             .pNext = NULL,
3233         };
3234 
3235 #ifdef __unix__
3236         exportAllocate.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
3237 #endif
3238 
3239 #ifdef __linux__
3240         if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
3241             exportAllocate.handleTypes |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
3242         }
3243 #endif
3244 
3245 #ifdef _WIN32
3246         exportAllocate.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
3247 #endif
3248 
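             // Note: when the ExternalBlob feature is enabled, host-visible allocations are made
             // exportable here so that their payload can later be handed back to the guest as an
             // external handle (see vkGetBlobInternal below) instead of being mapped through host
             // process memory.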
3249         bool hostVisible = memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3250         if (hostVisible && feature_is_enabled(kFeature_ExternalBlob)) {
3251             vk_append_struct(&structChainIter, &exportAllocate);
3252         }
3253 
3254         if (createBlobInfoPtr && createBlobInfoPtr->blobMem == STREAM_BLOB_MEM_GUEST &&
3255             (createBlobInfoPtr->blobFlags & STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE)) {
3256             DescriptorType rawDescriptor;
3257             auto descriptorInfoOpt =
3258                 HostmemIdMapping::get()->removeDescriptorInfo(createBlobInfoPtr->blobId);
3259             if (descriptorInfoOpt) {
3260                 auto rawDescriptorOpt = (*descriptorInfoOpt).descriptor.release();
3261                 if (rawDescriptorOpt) {
3262                     rawDescriptor = *rawDescriptorOpt;
3263                 } else {
3264                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3265                 }
3266             } else {
3267                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3268             }
3269 #if defined(__linux__) || defined(__QNX__)
3270             importInfo.fd = rawDescriptor;
3271 #endif
3272 
3273 #ifdef __linux__
3274             if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
3275                 importInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
3276             }
3277 #endif
3278             vk_append_struct(&structChainIter, &importInfo);
3279         }
3280 
3281         VkImportMemoryHostPointerInfoEXT importHostInfo;
3282         std::optional<SharedMemory> sharedMemory = std::nullopt;
3283 
3284         // TODO(b/261222354): Make sure the feature exists when initializing sVkEmulation.
3285         if (hostVisible && feature_is_enabled(kFeature_SystemBlob)) {
3286             // Ensure size is page-aligned.
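                 // __ALIGN rounds up to the next multiple of kPageSizeforBlob, e.g. with a
                 // 4096-byte page size an allocationSize of 5000 becomes 8192.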
3287             VkDeviceSize alignedSize = __ALIGN(localAllocInfo.allocationSize, kPageSizeforBlob);
3288             if (alignedSize != localAllocInfo.allocationSize) {
3289                 ERR("Warning: Aligning allocation size from %llu to %llu",
3290                     static_cast<unsigned long long>(localAllocInfo.allocationSize),
3291                     static_cast<unsigned long long>(alignedSize));
3292             }
3293             localAllocInfo.allocationSize = alignedSize;
3294 
3295             static std::atomic<uint64_t> uniqueShmemId = 0;
3296             sharedMemory = SharedMemory("shared-memory-vk-" + std::to_string(uniqueShmemId++),
3297                                         localAllocInfo.allocationSize);
3298             int ret = sharedMemory->create(0600);
3299             if (ret) {
3300                 ERR("Failed to create system-blob host-visible memory, error: %d", ret);
3301                 return VK_ERROR_OUT_OF_HOST_MEMORY;
3302             }
3303             mappedPtr = sharedMemory->get();
3304             int mappedPtrAlignment = reinterpret_cast<uintptr_t>(mappedPtr) % kPageSizeforBlob;
3305             if (mappedPtrAlignment != 0) {
3306                 ERR("Warning: Mapped shared memory pointer is not aligned to page size, alignment "
3307                     "is: %d",
3308                     mappedPtrAlignment);
3309             }
3310             importHostInfo = {.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
3311                               .pNext = NULL,
3312                               .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
3313                               .pHostPointer = mappedPtr};
3314             localAllocInfo.pNext = &importHostInfo;
3315         }
3316 
3317         VkResult result = vk->vkAllocateMemory(device, &localAllocInfo, pAllocator, pMemory);
3318 
3319         if (result != VK_SUCCESS) {
3320             return result;
3321         }
3322 
3323 #ifdef _WIN32
3324         // Let ManagedDescriptor close the underlying HANDLE when it goes out of scope. From the
3325         // VkImportMemoryWin32HandleInfoKHR spec: Importing memory object payloads from Windows
3326         // handles does not transfer ownership of the handle to the Vulkan implementation. For
3327         // handle types defined as NT handles, the application must release handle ownership using
3328         // the CloseHandle system call when the handle is no longer needed. For handle types defined
3329         // as NT handles, the imported memory object holds a reference to its payload.
3330 #else
3331         // Tell ManagedDescriptor not to close the underlying fd, because the ownership has already
3332         // been transferred to the Vulkan implementation. From VkImportMemoryFdInfoKHR spec:
3333         // Importing memory from a file descriptor transfers ownership of the file descriptor from
3334         // the application to the Vulkan implementation. The application must not perform any
3335         // operations on the file descriptor after a successful import. The imported memory object
3336         // holds a reference to its payload.
3337         externalMemoryHandle.release();
3338 #endif
3339 
3340         std::lock_guard<std::recursive_mutex> lock(mLock);
3341 
3342         mMemoryInfo[*pMemory] = MemoryInfo();
3343         auto& memoryInfo = mMemoryInfo[*pMemory];
3344         memoryInfo.size = localAllocInfo.allocationSize;
3345         memoryInfo.device = device;
3346         memoryInfo.memoryIndex = localAllocInfo.memoryTypeIndex;
3347 #ifdef VK_MVK_moltenvk
3348         if (importCbInfoPtr && m_emu->instanceSupportsMoltenVK) {
3349             memoryInfo.mtlTexture = getColorBufferMTLTexture(importCbInfoPtr->colorBuffer);
3350         }
3351 #endif
3352 
3353         if (!hostVisible) {
3354             *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
3355             return result;
3356         }
3357 
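             // Record a host caching mode for the guest mapping based on the Vulkan memory property
             // flags, checking the more specific flags first: HOST_CACHED maps to cached,
             // DEVICE_UNCACHED_AMD to uncached, and plain HOST_COHERENT to write-combined.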
3358         if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
3359             memoryInfo.caching = MAP_CACHE_CACHED;
3360         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD) {
3361             memoryInfo.caching = MAP_CACHE_UNCACHED;
3362         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
3363             memoryInfo.caching = MAP_CACHE_WC;
3364         }
3365 
3366         VkInstance* instance = deviceToInstanceLocked(device);
3367         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
3368 
3369         // If gfxstream needs to be able to read from this memory, needToMap should be true.
3370         // When external blobs are off, we always want to map HOST_VISIBLE memory because we run
3371         // in the same process as the guest.
3372         // When external blobs are on, we want to map memory only if a workaround is using it in
3373         // the gfxstream process. This happens when ASTC CPU emulation is on.
3374         bool needToMap =
3375             (!feature_is_enabled(kFeature_ExternalBlob) || instanceInfo->useAstcCpuDecompression) &&
3376             !createBlobInfoPtr;
3377 
3378         // Some cases provide a mappedPtr, so we only map if we still don't have a pointer here.
3379         if (!mappedPtr && needToMap) {
3380             memoryInfo.needUnmap = true;
3381             VkResult mapResult =
3382                 vk->vkMapMemory(device, *pMemory, 0, memoryInfo.size, 0, &memoryInfo.ptr);
3383             if (mapResult != VK_SUCCESS) {
3384                 freeMemoryLocked(vk, device, *pMemory, pAllocator);
3385                 *pMemory = VK_NULL_HANDLE;
3386                 return VK_ERROR_OUT_OF_HOST_MEMORY;
3387             }
3388         } else {
3389             // Since we didn't call vkMapMemory, unmapping is not needed (don't own mappedPtr).
3390             memoryInfo.needUnmap = false;
3391             memoryInfo.ptr = mappedPtr;
3392 
3393             if (createBlobInfoPtr) {
3394                 memoryInfo.blobId = createBlobInfoPtr->blobId;
3395             }
3396 
3397             // Always assign the shared memory into memoryInfo. If it was used, then it will have
3398             // ownership transferred.
3399             memoryInfo.sharedMemory = std::exchange(sharedMemory, std::nullopt);
3400         }
3401 
3402         *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
3403 
3404         return result;
3405     }
3406 
3407     void freeMemoryLocked(VulkanDispatch* vk, VkDevice device, VkDeviceMemory memory,
3408                           const VkAllocationCallbacks* pAllocator) {
3409         auto* info = android::base::find(mMemoryInfo, memory);
3410         if (!info) return;  // Invalid usage.
3411 
3412 #ifdef __APPLE__
3413         if (info->mtlTexture) {
3414             CFRelease(info->mtlTexture);
3415             info->mtlTexture = nullptr;
3416         }
3417 #endif
3418 
3419         if (info->directMapped) {
3420             // If direct mapped, we leave it up to the guest address space driver
3421             // to control unmapping of the KVM slot on the host side, in order to
3422             // avoid situations where:
3423             //
3424             // 1. we try to unmap here and deadlock, or
3425             //
3426             // 2. we unmap at the wrong time (a parallel call to unmap vs.
3427             // address space allocate and mapMemory could lead to mapping the
3428             // same gpa twice).
3429             if (mUseOldMemoryCleanupPath) {
3430                 unmapMemoryAtGpaIfExists(info->guestPhysAddr);
3431             }
3432         }
3433 
3434         if (info->virtioGpuMapped) {
3435             if (mLogging) {
3436                 fprintf(stderr, "%s: unmap hostmem %p id 0x%llx\n", __func__, info->ptr,
3437                         (unsigned long long)info->hostmemId);
3438             }
3439 
3440             get_emugl_vm_operations().hostmemUnregister(info->hostmemId);
3441         }
3442 
3443         if (info->needUnmap && info->ptr) {
3444             vk->vkUnmapMemory(device, memory);
3445         }
3446 
3447         vk->vkFreeMemory(device, memory, pAllocator);
3448 
3449         mMemoryInfo.erase(memory);
3450     }
3451 
3452     void on_vkFreeMemory(android::base::BumpPool* pool, VkDevice boxed_device,
3453                          VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
3454         auto device = unbox_VkDevice(boxed_device);
3455         auto vk = dispatch_VkDevice(boxed_device);
3456 
3457         std::lock_guard<std::recursive_mutex> lock(mLock);
3458 
3459         freeMemoryLocked(vk, device, memory, pAllocator);
3460     }
3461 
3462     VkResult on_vkMapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory memory,
3463                             VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
3464                             void** ppData) {
3465         std::lock_guard<std::recursive_mutex> lock(mLock);
3466         return on_vkMapMemoryLocked(0, memory, offset, size, flags, ppData);
3467     }
3468     VkResult on_vkMapMemoryLocked(VkDevice, VkDeviceMemory memory, VkDeviceSize offset,
3469                                   VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {
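             // The guest's vkMapMemory is satisfied entirely from the host-side mapping set up at
             // allocation time; no vkMapMemory call is forwarded to the driver here, and
             // on_vkUnmapMemory below is correspondingly a no-op.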
3470         auto* info = android::base::find(mMemoryInfo, memory);
3471         if (!info || !info->ptr) return VK_ERROR_MEMORY_MAP_FAILED;  // Invalid usage.
3472 
3473         *ppData = (void*)((uint8_t*)info->ptr + offset);
3474         return VK_SUCCESS;
3475     }
3476 
3477     void on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory) {
3478         // no-op; user-level mapping does not correspond
3479         // to any operation here.
3480     }
3481 
3482     uint8_t* getMappedHostPointer(VkDeviceMemory memory) {
3483         std::lock_guard<std::recursive_mutex> lock(mLock);
3484 
3485         auto* info = android::base::find(mMemoryInfo, memory);
3486         if (!info) return nullptr;
3487 
3488         return (uint8_t*)(info->ptr);
3489     }
3490 
3491     VkDeviceSize getDeviceMemorySize(VkDeviceMemory memory) {
3492         std::lock_guard<std::recursive_mutex> lock(mLock);
3493 
3494         auto* info = android::base::find(mMemoryInfo, memory);
3495         if (!info) return 0;
3496 
3497         return info->size;
3498     }
3499 
3500     bool usingDirectMapping() const {
3501         return feature_is_enabled(kFeature_GLDirectMem) ||
3502                feature_is_enabled(kFeature_VirtioGpuNext);
3503     }
3504 
3505     HostFeatureSupport getHostFeatureSupport() const {
3506         HostFeatureSupport res;
3507 
3508         if (!m_vk) return res;
3509 
3510         auto emu = getGlobalVkEmulation();
3511 
3512         res.supportsVulkan = emu && emu->live;
3513 
3514         if (!res.supportsVulkan) return res;
3515 
3516         const auto& props = emu->deviceInfo.physdevProps;
3517 
3518         res.supportsVulkan1_1 = props.apiVersion >= VK_API_VERSION_1_1;
3519         res.supportsExternalMemory = emu->deviceInfo.supportsExternalMemory;
3520         res.useDeferredCommands = emu->useDeferredCommands;
3521         res.useCreateResourcesWithRequirements = emu->useCreateResourcesWithRequirements;
3522 
3523         res.apiVersion = props.apiVersion;
3524         res.driverVersion = props.driverVersion;
3525         res.deviceID = props.deviceID;
3526         res.vendorID = props.vendorID;
3527         return res;
3528     }
3529 
3530     bool hasInstanceExtension(VkInstance instance, const std::string& name) {
3531         auto* info = android::base::find(mInstanceInfo, instance);
3532         if (!info) return false;
3533 
3534         for (const auto& enabledName : info->enabledExtensionNames) {
3535             if (name == enabledName) return true;
3536         }
3537 
3538         return false;
3539     }
3540 
3541     bool hasDeviceExtension(VkDevice device, const std::string& name) {
3542         auto* info = android::base::find(mDeviceInfo, device);
3543         if (!info) return false;
3544 
3545         for (const auto& enabledName : info->enabledExtensionNames) {
3546             if (name == enabledName) return true;
3547         }
3548 
3549         return false;
3550     }
3551 
3552     // Returns whether a vector of VkExtensionProperties contains a particular extension
3553     bool hasDeviceExtension(const std::vector<VkExtensionProperties>& properties,
3554                             const char* name) {
3555         for (const auto& prop : properties) {
3556             if (strcmp(prop.extensionName, name) == 0) return true;
3557         }
3558         return false;
3559     }
3560 
3561     // Convenience function to call vkEnumerateDeviceExtensionProperties and get the results as an
3562     // std::vector
3563     VkResult enumerateDeviceExtensionProperties(VulkanDispatch* vk, VkPhysicalDevice physicalDevice,
3564                                                 const char* pLayerName,
3565                                                 std::vector<VkExtensionProperties>& properties) {
3566         uint32_t propertyCount = 0;
3567         VkResult result = vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
3568                                                                    &propertyCount, nullptr);
3569         if (result != VK_SUCCESS) return result;
3570 
3571         properties.resize(propertyCount);
3572         return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, &propertyCount,
3573                                                         properties.data());
3574     }
3575 
3576     // VK_ANDROID_native_buffer
3577     VkResult on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool, VkDevice,
3578                                                   VkFormat format, VkImageUsageFlags imageUsage,
3579                                                   int* grallocUsage) {
3580         getGralloc0Usage(format, imageUsage, grallocUsage);
3581         return VK_SUCCESS;
3582     }
3583 
3584     VkResult on_vkGetSwapchainGrallocUsage2ANDROID(
3585         android::base::BumpPool* pool, VkDevice, VkFormat format, VkImageUsageFlags imageUsage,
3586         VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
3587         uint64_t* grallocProducerUsage) {
3588         getGralloc1Usage(format, imageUsage, swapchainImageUsage, grallocConsumerUsage,
3589                          grallocProducerUsage);
3590         return VK_SUCCESS;
3591     }
3592 
3593     VkResult on_vkAcquireImageANDROID(android::base::BumpPool* pool, VkDevice boxed_device,
3594                                       VkImage image, int nativeFenceFd, VkSemaphore semaphore,
3595                                       VkFence fence) {
3596         auto device = unbox_VkDevice(boxed_device);
3597         auto vk = dispatch_VkDevice(boxed_device);
3598 
3599         std::lock_guard<std::recursive_mutex> lock(mLock);
3600 
3601         auto* imageInfo = android::base::find(mImageInfo, image);
3602         if (!imageInfo) {
3603             return VK_ERROR_INITIALIZATION_FAILED;
3604         }
3605 
3606         VkQueue defaultQueue;
3607         uint32_t defaultQueueFamilyIndex;
3608         Lock* defaultQueueLock;
3609         if (!getDefaultQueueForDeviceLocked(device, &defaultQueue, &defaultQueueFamilyIndex,
3610                                             &defaultQueueLock)) {
3611             fprintf(stderr, "%s: can't get the default queue\n", __func__);
3612             return VK_ERROR_INITIALIZATION_FAILED;
3613         }
3614 
3615         AndroidNativeBufferInfo* anbInfo = imageInfo->anbInfo.get();
3616 
3617         return setAndroidNativeImageSemaphoreSignaled(vk, device, defaultQueue,
3618                                                       defaultQueueFamilyIndex, defaultQueueLock,
3619                                                       semaphore, fence, anbInfo);
3620     }
3621 
3622     VkResult on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool* pool, VkQueue boxed_queue,
3623                                                  uint32_t waitSemaphoreCount,
3624                                                  const VkSemaphore* pWaitSemaphores, VkImage image,
3625                                                  int* pNativeFenceFd) {
3626         auto queue = unbox_VkQueue(boxed_queue);
3627         auto vk = dispatch_VkQueue(boxed_queue);
3628 
3629         std::lock_guard<std::recursive_mutex> lock(mLock);
3630 
3631         auto* queueInfo = android::base::find(mQueueInfo, queue);
3632         if (!queueInfo) return VK_ERROR_INITIALIZATION_FAILED;
3633 
3634         if (mRenderDocWithMultipleVkInstances) {
3635             VkPhysicalDevice vkPhysicalDevice = mDeviceToPhysicalDevice.at(queueInfo->device);
3636             VkInstance vkInstance = mPhysicalDeviceToInstance.at(vkPhysicalDevice);
3637             mRenderDocWithMultipleVkInstances->onFrameDelimiter(vkInstance);
3638         }
3639 
3640         auto* imageInfo = android::base::find(mImageInfo, image);
3641         if (!imageInfo) return VK_ERROR_INITIALIZATION_FAILED;
3642         auto anbInfo = imageInfo->anbInfo;
3643         if (anbInfo->useVulkanNativeImage) {
3644             // vkQueueSignalReleaseImageANDROID() is only called by the Android framework's
3645             // implementation of vkQueuePresentKHR(). The guest application is responsible for
3646             // transitioning the image layout of the image passed to vkQueuePresentKHR() to
3647             // VK_IMAGE_LAYOUT_PRESENT_SRC_KHR before the call. If the host is using native
3648             // Vulkan images where `image` is backed with the same memory as its ColorBuffer,
3649             // then we need to update the tracked layout for that ColorBuffer.
3650             setColorBufferCurrentLayout(anbInfo->colorBufferHandle,
3651                                         VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
3652         }
3653 
3654         return syncImageToColorBuffer(vk, queueInfo->queueFamilyIndex, queue, queueInfo->lock,
3655                                       waitSemaphoreCount, pWaitSemaphores, pNativeFenceFd, anbInfo);
3656     }
3657 
3658     VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
3659                                                   VkDevice boxed_device, VkDeviceMemory memory,
3660                                                   uint64_t* pAddress) {
3661         auto device = unbox_VkDevice(boxed_device);
3662         auto vk = dispatch_VkDevice(boxed_device);
3663 
3664         if (!feature_is_enabled(kFeature_GLDirectMem)) {
3665             fprintf(stderr,
3666                     "FATAL: Tried to use direct mapping "
3667                     "while GLDirectMem is not enabled!\n");
3668         }
3669 
3670         std::lock_guard<std::recursive_mutex> lock(mLock);
3671 
3672         if (mLogging) {
3673             fprintf(stderr, "%s: deviceMemory: 0x%llx pAddress: 0x%llx\n", __func__,
3674                     (unsigned long long)memory, (unsigned long long)(*pAddress));
3675         }
3676 
3677         if (!mapHostVisibleMemoryToGuestPhysicalAddressLocked(vk, device, memory, *pAddress)) {
3678             return VK_ERROR_OUT_OF_HOST_MEMORY;
3679         }
3680 
3681         auto* info = android::base::find(mMemoryInfo, memory);
3682         if (!info) return VK_ERROR_INITIALIZATION_FAILED;
3683 
3684         *pAddress = (uint64_t)(uintptr_t)info->ptr;
3685 
3686         return VK_SUCCESS;
3687     }
3688 
3689     VkResult vkGetBlobInternal(VkDevice boxed_device, VkDeviceMemory memory, uint64_t hostBlobId) {
3690         std::lock_guard<std::recursive_mutex> lock(mLock);
3692 
3693         auto* info = android::base::find(mMemoryInfo, memory);
3694         if (!info) return VK_ERROR_OUT_OF_HOST_MEMORY;
3695 
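             // Prefer the blob id recorded at allocation time (guest-created blobs) when the caller
             // did not pass one explicitly.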
3696         hostBlobId = (info->blobId && !hostBlobId) ? info->blobId : hostBlobId;
3697 
3698         if (feature_is_enabled(kFeature_SystemBlob) && info->sharedMemory.has_value()) {
3699             uint32_t handleType = STREAM_MEM_HANDLE_TYPE_SHM;
3700             // We transfer ownership of the shared memory handle to the descriptor info.
3701             // The memory itself is destroyed only when all processes unmap / release their
3702             // handles.
3703             HostmemIdMapping::get()->addDescriptorInfo(hostBlobId,
3704                                                        info->sharedMemory->releaseHandle(),
3705                                                        handleType, info->caching, std::nullopt);
3706         } else if (feature_is_enabled(kFeature_ExternalBlob)) {
3707             VkResult result;
3708             auto device = unbox_VkDevice(boxed_device);
3709             DescriptorType handle;
3710             uint32_t handleType;
3711             struct VulkanInfo vulkanInfo = {
3712                 .memoryIndex = info->memoryIndex,
3713             };
3714             memcpy(vulkanInfo.deviceUUID, m_emu->deviceInfo.idProps.deviceUUID,
3715                    sizeof(vulkanInfo.deviceUUID));
3716             memcpy(vulkanInfo.driverUUID, m_emu->deviceInfo.idProps.driverUUID,
3717                    sizeof(vulkanInfo.driverUUID));
3718 
3719 #ifdef __unix__
3720             VkMemoryGetFdInfoKHR getFd = {
3721                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
3722                 .pNext = nullptr,
3723                 .memory = memory,
3724                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
3725             };
3726 
3727             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_FD;
3728 #endif
3729 
3730 #ifdef __linux__
3731             if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
3732                 getFd.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
3733                 handleType = STREAM_MEM_HANDLE_TYPE_DMABUF;
3734             }
3735 #endif
3736 
3737 #ifdef __unix__
3738             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getFd, &handle);
3739             if (result != VK_SUCCESS) {
3740                 return result;
3741             }
3742 #endif
3743 
3744 #ifdef _WIN32
3745             VkMemoryGetWin32HandleInfoKHR getHandle = {
3746                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
3747                 .pNext = nullptr,
3748                 .memory = memory,
3749                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
3750             };
3751 
3752             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_WIN32;
3753 
3754             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getHandle, &handle);
3755             if (result != VK_SUCCESS) {
3756                 return result;
3757             }
3758 #endif
3759 
3760             ManagedDescriptor managedHandle(handle);
3761             HostmemIdMapping::get()->addDescriptorInfo(hostBlobId, std::move(managedHandle),
3762                                                        handleType, info->caching,
3763                                                        std::optional<VulkanInfo>(vulkanInfo));
3764         } else if (!info->needUnmap) {
3765             auto device = unbox_VkDevice(boxed_device);
3766             auto vk = dispatch_VkDevice(boxed_device);
3767 
3768             VkResult mapResult = vk->vkMapMemory(device, memory, 0, info->size, 0, &info->ptr);
3769             if (mapResult != VK_SUCCESS) {
3770                 return VK_ERROR_OUT_OF_HOST_MEMORY;
3771             }
3772 
3773             info->needUnmap = true;
3774         }
3775 
3776         if (info->needUnmap) {
3777             struct MemEntry entry = {0};
3778             uint64_t hva = (uint64_t)(uintptr_t)(info->ptr);
3779             uint64_t size = (uint64_t)(uintptr_t)(info->size);
3780 
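                 // Register the mapping at page granularity: align the host virtual address down to
                 // a page boundary and round the size up to a whole number of pages.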
3781             uint64_t alignedHva = hva & kPageMaskForBlob;
3782             uint64_t alignedSize =
3783                 kPageSizeforBlob * ((size + kPageSizeforBlob - 1) / kPageSizeforBlob);
3784 
3785             entry.hva = (void*)(uintptr_t)alignedHva;
3786             entry.size = alignedSize;
3787             entry.caching = info->caching;
3788 
3789             HostmemIdMapping::get()->addMapping(hostBlobId, &entry);
3790             info->virtioGpuMapped = true;
3791             info->hostmemId = hostBlobId;
3792         }
3793 
3794         return VK_SUCCESS;
3795     }
3796 
3797     VkResult on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
3798                                 VkDeviceMemory memory) {
3799         return vkGetBlobInternal(boxed_device, memory, 0);
3800     }
3801 
3802     VkResult on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool* pool,
3803                                                  VkDevice boxed_device, VkDeviceMemory memory,
3804                                                  uint64_t* pAddress, uint64_t* pSize,
3805                                                  uint64_t* pHostmemId) {
3806         hostBlobId++;
3807         *pHostmemId = hostBlobId;
3808         return vkGetBlobInternal(boxed_device, memory, hostBlobId);
3809     }
3810 
3811     VkResult on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
3812                                        VkDeviceMemory memory,
3813                                        const VkAllocationCallbacks* pAllocator) {
3814         on_vkFreeMemory(pool, boxed_device, memory, pAllocator);
3815 
3816         return VK_SUCCESS;
3817     }
3818 
3819     VkResult on_vkAllocateCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
3820                                          const VkCommandBufferAllocateInfo* pAllocateInfo,
3821                                          VkCommandBuffer* pCommandBuffers) {
3822         auto device = unbox_VkDevice(boxed_device);
3823         auto vk = dispatch_VkDevice(boxed_device);
3824 
3825         VkResult result = vk->vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3826 
3827         if (result != VK_SUCCESS) {
3828             return result;
3829         }
3830 
3831         std::lock_guard<std::recursive_mutex> lock(mLock);
3832 
3833         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3834         if (!deviceInfo) return VK_ERROR_UNKNOWN;
3835 
3836         for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
3837             mCmdBufferInfo[pCommandBuffers[i]] = CommandBufferInfo();
3838             mCmdBufferInfo[pCommandBuffers[i]].device = device;
3839             mCmdBufferInfo[pCommandBuffers[i]].debugUtilsHelper = deviceInfo->debugUtilsHelper;
3840             mCmdBufferInfo[pCommandBuffers[i]].cmdPool = pAllocateInfo->commandPool;
3841             auto boxed = new_boxed_VkCommandBuffer(pCommandBuffers[i], vk,
3842                                                    false /* does not own dispatch */);
3843             mCmdBufferInfo[pCommandBuffers[i]].boxed = boxed;
3844             pCommandBuffers[i] = (VkCommandBuffer)boxed;
3845         }
3846         return result;
3847     }
3848 
3849     VkResult on_vkCreateCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
3850                                     const VkCommandPoolCreateInfo* pCreateInfo,
3851                                     const VkAllocationCallbacks* pAllocator,
3852                                     VkCommandPool* pCommandPool) {
3853         auto device = unbox_VkDevice(boxed_device);
3854         auto vk = dispatch_VkDevice(boxed_device);
3855 
3856         VkResult result = vk->vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
3857         if (result != VK_SUCCESS) {
3858             return result;
3859         }
3860         std::lock_guard<std::recursive_mutex> lock(mLock);
3861         mCmdPoolInfo[*pCommandPool] = CommandPoolInfo();
3862         auto& cmdPoolInfo = mCmdPoolInfo[*pCommandPool];
3863         cmdPoolInfo.device = device;
3864 
3865         *pCommandPool = new_boxed_non_dispatchable_VkCommandPool(*pCommandPool);
3866         cmdPoolInfo.boxed = *pCommandPool;
3867 
3868         return result;
3869     }
3870 
3871     void on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
3872                                  VkCommandPool commandPool,
3873                                  const VkAllocationCallbacks* pAllocator) {
3874         auto device = unbox_VkDevice(boxed_device);
3875         auto vk = dispatch_VkDevice(boxed_device);
3876 
3877         vk->vkDestroyCommandPool(device, commandPool, pAllocator);
3878         std::lock_guard<std::recursive_mutex> lock(mLock);
3879         const auto* cmdPoolInfo = android::base::find(mCmdPoolInfo, commandPool);
3880         if (cmdPoolInfo) {
3881             removeCommandBufferInfo(cmdPoolInfo->cmdBuffers);
3882             mCmdPoolInfo.erase(commandPool);
3883         }
3884     }
3885 
3886     VkResult on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
3887                                    VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
3888         auto device = unbox_VkDevice(boxed_device);
3889         auto vk = dispatch_VkDevice(boxed_device);
3890 
3891         VkResult result = vk->vkResetCommandPool(device, commandPool, flags);
3892         if (result != VK_SUCCESS) {
3893             return result;
3894         }
3895         return result;
3896     }
3897 
3898     void on_vkCmdExecuteCommands(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
3899                                  uint32_t commandBufferCount,
3900                                  const VkCommandBuffer* pCommandBuffers) {
3901         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3902         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3903 
3904         vk->vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
3905         std::lock_guard<std::recursive_mutex> lock(mLock);
3906         CommandBufferInfo& cmdBuffer = mCmdBufferInfo[commandBuffer];
3907         cmdBuffer.subCmds.insert(cmdBuffer.subCmds.end(), pCommandBuffers,
3908                                  pCommandBuffers + commandBufferCount);
3909     }
3910 
3911     VkResult on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue boxed_queue,
3912                               uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
3913         auto queue = unbox_VkQueue(boxed_queue);
3914         auto vk = dispatch_VkQueue(boxed_queue);
3915 
3916         Lock* ql;
3917         {
3918             std::lock_guard<std::recursive_mutex> lock(mLock);
3919 
3920             {
3921                 auto* queueInfo = android::base::find(mQueueInfo, queue);
3922                 if (queueInfo) {
3923                     sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(queueInfo->device);
3924                 }
3925             }
3926 
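                 // Run any host-side preprocessing recorded for the submitted command buffers (and,
                 // per the recursion, any secondary command buffers they execute) before forwarding
                 // the actual vkQueueSubmit to the driver.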
3927             for (uint32_t i = 0; i < submitCount; i++) {
3928                 const VkSubmitInfo& submit = pSubmits[i];
3929                 for (uint32_t c = 0; c < submit.commandBufferCount; c++) {
3930                     executePreprocessRecursive(0, submit.pCommandBuffers[c]);
3931                 }
3932             }
3933 
3934             auto* queueInfo = android::base::find(mQueueInfo, queue);
3935             if (!queueInfo) return VK_SUCCESS;
3936             ql = queueInfo->lock;
3937         }
3938 
3939         AutoLock qlock(*ql);
3940         auto result = vk->vkQueueSubmit(queue, submitCount, pSubmits, fence);
3941 
3942         // After vkQueueSubmit is called, we can signal the condition variable
3943         // in FenceInfo, so that other threads (e.g. SyncThread) can call
3944         // waitForFence() on this fence.
3945         {
3946             std::lock_guard<std::recursive_mutex> lock(mLock);
3947             auto* fenceInfo = android::base::find(mFenceInfo, fence);
3948             if (fenceInfo) {
3949                 fenceInfo->state = FenceInfo::State::kWaitable;
3950                 fenceInfo->lock.lock();
3951                 fenceInfo->cv.signalAndUnlock(&fenceInfo->lock);
3952             }
3953         }
3954 
3955         return result;
3956     }
3957 
3958     VkResult on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue boxed_queue) {
3959         auto queue = unbox_VkQueue(boxed_queue);
3960         auto vk = dispatch_VkQueue(boxed_queue);
3961 
3962         if (!queue) return VK_SUCCESS;
3963 
3964         Lock* ql;
3965         {
3966             std::lock_guard<std::recursive_mutex> lock(mLock);
3967             auto* queueInfo = android::base::find(mQueueInfo, queue);
3968             if (!queueInfo) return VK_SUCCESS;
3969             ql = queueInfo->lock;
3970         }
3971 
3972         AutoLock qlock(*ql);
3973         return vk->vkQueueWaitIdle(queue);
3974     }
3975 
3976     VkResult on_vkResetCommandBuffer(android::base::BumpPool* pool,
3977                                      VkCommandBuffer boxed_commandBuffer,
3978                                      VkCommandBufferResetFlags flags) {
3979         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3980         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3981 
3982         VkResult result = vk->vkResetCommandBuffer(commandBuffer, flags);
3983         if (VK_SUCCESS == result) {
3984             std::lock_guard<std::recursive_mutex> lock(mLock);
3985             auto& bufferInfo = mCmdBufferInfo[commandBuffer];
3986             bufferInfo.reset();
3987         }
3988         return result;
3989     }
3990 
3991     void on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
3992                                  VkCommandPool commandPool, uint32_t commandBufferCount,
3993                                  const VkCommandBuffer* pCommandBuffers) {
3994         auto device = unbox_VkDevice(boxed_device);
3995         auto vk = dispatch_VkDevice(boxed_device);
3996 
3997         if (!device) return;
3998         vk->vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3999         std::lock_guard<std::recursive_mutex> lock(mLock);
4000         for (uint32_t i = 0; i < commandBufferCount; i++) {
4001             const auto& cmdBufferInfoIt = mCmdBufferInfo.find(pCommandBuffers[i]);
4002             if (cmdBufferInfoIt != mCmdBufferInfo.end()) {
4003                 const auto& cmdPoolInfoIt = mCmdPoolInfo.find(cmdBufferInfoIt->second.cmdPool);
4004                 if (cmdPoolInfoIt != mCmdPoolInfo.end()) {
4005                     cmdPoolInfoIt->second.cmdBuffers.erase(pCommandBuffers[i]);
4006                 }
4007                 // Done in decoder
4008                 // delete_VkCommandBuffer(cmdBufferInfoIt->second.boxed);
4009                 mCmdBufferInfo.erase(cmdBufferInfoIt);
4010             }
4011         }
4012     }
4013 
4014     void on_vkGetPhysicalDeviceExternalSemaphoreProperties(
4015         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
4016         const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
4017         VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
4018         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
4019 
4020         if (!physicalDevice) {
4021             return;
4022         }
4023         // Cannot forward this call to the driver because the NVIDIA Linux driver crashes on it.
4024         switch (pExternalSemaphoreInfo->handleType) {
4025             case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
4026                 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
4027                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
4028                 pExternalSemaphoreProperties->compatibleHandleTypes =
4029                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
4030                 pExternalSemaphoreProperties->externalSemaphoreFeatures =
4031                     VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
4032                     VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
4033                 return;
4034             case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
4035                 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
4036                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
4037                 pExternalSemaphoreProperties->compatibleHandleTypes =
4038                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
4039                 pExternalSemaphoreProperties->externalSemaphoreFeatures =
4040                     VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
4041                     VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
4042                 return;
4043             default:
4044                 break;
4045         }
4046 
4047         pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
4048         pExternalSemaphoreProperties->compatibleHandleTypes = 0;
4049         pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
4050     }
4051 
4052     VkResult on_vkCreateDescriptorUpdateTemplate(
4053         android::base::BumpPool* pool, VkDevice boxed_device,
4054         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4055         const VkAllocationCallbacks* pAllocator,
4056         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
4057         auto device = unbox_VkDevice(boxed_device);
4058         auto vk = dispatch_VkDevice(boxed_device);
4059 
4060         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
4061 
4062         VkResult res =
4063             vk->vkCreateDescriptorUpdateTemplate(device, &descriptorUpdateTemplateInfo.createInfo,
4064                                                  pAllocator, pDescriptorUpdateTemplate);
4065 
4066         if (res == VK_SUCCESS) {
4067             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
4068                                              descriptorUpdateTemplateInfo);
4069             *pDescriptorUpdateTemplate =
4070                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
4071         }
4072 
4073         return res;
4074     }
4075 
4076     VkResult on_vkCreateDescriptorUpdateTemplateKHR(
4077         android::base::BumpPool* pool, VkDevice boxed_device,
4078         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4079         const VkAllocationCallbacks* pAllocator,
4080         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
4081         auto device = unbox_VkDevice(boxed_device);
4082         auto vk = dispatch_VkDevice(boxed_device);
4083 
4084         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
4085 
4086         VkResult res = vk->vkCreateDescriptorUpdateTemplateKHR(
4087             device, &descriptorUpdateTemplateInfo.createInfo, pAllocator,
4088             pDescriptorUpdateTemplate);
4089 
4090         if (res == VK_SUCCESS) {
4091             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
4092                                              descriptorUpdateTemplateInfo);
4093             *pDescriptorUpdateTemplate =
4094                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
4095         }
4096 
4097         return res;
4098     }
4099 
4100     void on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool* pool, VkDevice boxed_device,
4101                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4102                                               const VkAllocationCallbacks* pAllocator) {
4103         auto device = unbox_VkDevice(boxed_device);
4104         auto vk = dispatch_VkDevice(boxed_device);
4105 
4106         vk->vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
4107 
4108         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
4109     }
4110 
4111     void on_vkDestroyDescriptorUpdateTemplateKHR(
4112         android::base::BumpPool* pool, VkDevice boxed_device,
4113         VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4114         const VkAllocationCallbacks* pAllocator) {
4115         auto device = unbox_VkDevice(boxed_device);
4116         auto vk = dispatch_VkDevice(boxed_device);
4117 
4118         vk->vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
4119 
4120         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
4121     }
4122 
4123     void on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
4124         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
4125         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
4126         uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
4127         const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
4128         const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
4129         const VkBufferView* pBufferViews) {
4130         auto device = unbox_VkDevice(boxed_device);
4131         auto vk = dispatch_VkDevice(boxed_device);
4132 
4133         std::lock_guard<std::recursive_mutex> lock(mLock);
4134         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
4135         if (!info) return;
4136 
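             // The template info keeps a single linearized staging buffer; copy the incoming image
             // infos, buffer infos, and buffer views to their precomputed offsets, then replay the
             // update through the driver's vkUpdateDescriptorSetWithTemplate.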
4137         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
4138                imageInfoCount * sizeof(VkDescriptorImageInfo));
4139         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
4140                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
4141         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
4142                bufferViewCount * sizeof(VkBufferView));
4143 
4144         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
4145                                               info->data.data());
4146     }
4147 
4148     void on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
4149         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
4150         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
4151         uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
4152         const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
4153         const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
4154         const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
4155         const uint8_t* pInlineUniformBlockData) {
4156         auto device = unbox_VkDevice(boxed_device);
4157         auto vk = dispatch_VkDevice(boxed_device);
4158 
4159         std::lock_guard<std::recursive_mutex> lock(mLock);
4160         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
4161         if (!info) return;
4162 
4163         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
4164                imageInfoCount * sizeof(VkDescriptorImageInfo));
4165         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
4166                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
4167         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
4168                bufferViewCount * sizeof(VkBufferView));
4169         memcpy(info->data.data() + info->inlineUniformBlockStart, pInlineUniformBlockData,
4170                inlineUniformBlockCount);
4171 
4172         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
4173                                               info->data.data());
4174     }
4175 
4176     void hostSyncCommandBuffer(const char* tag, VkCommandBuffer boxed_commandBuffer,
4177                                uint32_t needHostSync, uint32_t sequenceNumber) {
4178         auto nextDeadline = []() {
4179             return android::base::getUnixTimeUs() + 10000;  // 10 ms
4180         };
4181 
4182         auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s
4183 
4184         OrderMaintenanceInfo* order = ordmaint_VkCommandBuffer(boxed_commandBuffer);
4185         if (!order) return;
4186 
4187         AutoLock lock(order->lock);
4188 
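             // When host sync is requested, wait until the previously enqueued item
             // (sequenceNumber - 1) has been observed; the unsigned subtraction keeps the comparison
             // correct across sequence-number wraparound. The 5-second timeout keeps this from
             // blocking forever if the expected sequence number never arrives.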
4189         if (needHostSync) {
4190             while (
4191                 (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
4192                 auto waitUntilUs = nextDeadline();
4193                 order->cv.timedWait(&order->lock, waitUntilUs);
4194 
4195                 if (timeoutDeadline < android::base::getUnixTimeUs()) {
4196                     break;
4197                 }
4198             }
4199         }
4200 
4201         __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
4202         order->cv.signal();
4203         releaseOrderMaintInfo(order);
4204     }
4205 
4206     void on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
4207                                           VkCommandBuffer commandBuffer, uint32_t needHostSync,
4208                                           uint32_t sequenceNumber) {
4209         this->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
4210     }
4211 
4212     void hostSyncQueue(const char* tag, VkQueue boxed_queue, uint32_t needHostSync,
4213                        uint32_t sequenceNumber) {
4214         auto nextDeadline = []() {
4215             return android::base::getUnixTimeUs() + 10000;  // 10 ms
4216         };
4217 
4218         auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s
4219 
4220         OrderMaintenanceInfo* order = ordmaint_VkQueue(boxed_queue);
4221         if (!order) return;
4222 
4223         AutoLock lock(order->lock);
4224 
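             // Same ordering protocol as hostSyncCommandBuffer() above, applied to the queue's
             // order-maintenance state.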
4225         if (needHostSync) {
4226             while (
4227                 (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
4228                 auto waitUntilUs = nextDeadline();
4229                 order->cv.timedWait(&order->lock, waitUntilUs);
4230 
4231                 if (timeoutDeadline < android::base::getUnixTimeUs()) {
4232                     break;
4233                 }
4234             }
4235         }
4236 
4237         __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
4238         order->cv.signal();
4239         releaseOrderMaintInfo(order);
4240     }
4241 
4242     void on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
4243                                   uint32_t needHostSync, uint32_t sequenceNumber) {
4244         this->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
4245     }
4246 
4247     VkResult on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool* pool,
4248                                                     VkDevice boxed_device,
4249                                                     const VkImageCreateInfo* pCreateInfo,
4250                                                     const VkAllocationCallbacks* pAllocator,
4251                                                     VkImage* pImage,
4252                                                     VkMemoryRequirements* pMemoryRequirements) {
4253         if (pMemoryRequirements) {
4254             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
4255         }
4256 
4257         VkResult imageCreateRes =
4258             on_vkCreateImage(pool, boxed_device, pCreateInfo, pAllocator, pImage);
4259 
4260         if (imageCreateRes != VK_SUCCESS) {
4261             return imageCreateRes;
4262         }
4263 
4264         on_vkGetImageMemoryRequirements(pool, boxed_device, unbox_VkImage(*pImage),
4265                                         pMemoryRequirements);
4266 
4267         return imageCreateRes;
4268     }
4269 
on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer,VkMemoryRequirements * pMemoryRequirements)4270     VkResult on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool* pool,
4271                                                      VkDevice boxed_device,
4272                                                      const VkBufferCreateInfo* pCreateInfo,
4273                                                      const VkAllocationCallbacks* pAllocator,
4274                                                      VkBuffer* pBuffer,
4275                                                      VkMemoryRequirements* pMemoryRequirements) {
4276         if (pMemoryRequirements) {
4277             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
4278         }
4279 
4280         VkResult bufferCreateRes =
4281             on_vkCreateBuffer(pool, boxed_device, pCreateInfo, pAllocator, pBuffer);
4282 
4283         if (bufferCreateRes != VK_SUCCESS) {
4284             return bufferCreateRes;
4285         }
4286 
4287         auto device = unbox_VkDevice(boxed_device);
4288         auto vk = dispatch_VkDevice(boxed_device);
4289 
4290         vk->vkGetBufferMemoryRequirements(device, unbox_VkBuffer(*pBuffer), pMemoryRequirements);
4291 
4292         return bufferCreateRes;
4293     }
4294 
on_vkBeginCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo,const VkDecoderContext & context)4295     VkResult on_vkBeginCommandBuffer(android::base::BumpPool* pool,
4296                                      VkCommandBuffer boxed_commandBuffer,
4297                                      const VkCommandBufferBeginInfo* pBeginInfo,
4298                                      const VkDecoderContext& context) {
4299         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4300         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4301         VkResult result = vk->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
4302 
4303         if (result != VK_SUCCESS) {
4304             return result;
4305         }
4306 
4307         std::lock_guard<std::recursive_mutex> lock(mLock);
4308 
4309         auto* commandBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
4310         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
4311         commandBufferInfo->reset();
4312 
4313         if (context.processName) {
4314             commandBufferInfo->debugUtilsHelper.cmdBeginDebugLabel(commandBuffer, "Process %s",
4315                                                                    context.processName);
4316         }
4317 
4318         return VK_SUCCESS;
4319     }
4320 
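    // The *AsyncGOOGLE variants below simply forward to the synchronous handlers; the guest
    // issues them without waiting for a result (the VkResult is not reported back), so any
    // failure is only observable host-side.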
    VkResult on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                                VkCommandBuffer boxed_commandBuffer,
                                                const VkCommandBufferBeginInfo* pBeginInfo,
                                                const VkDecoderContext& context) {
        return this->on_vkBeginCommandBuffer(pool, boxed_commandBuffer, pBeginInfo, context);
    }

    VkResult on_vkEndCommandBuffer(android::base::BumpPool* pool,
                                   VkCommandBuffer boxed_commandBuffer,
                                   const VkDecoderContext& context) {
        auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
        auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);

        std::lock_guard<std::recursive_mutex> lock(mLock);

        auto* commandBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
        if (!commandBufferInfo) return VK_ERROR_UNKNOWN;

        if (context.processName) {
            commandBufferInfo->debugUtilsHelper.cmdEndDebugLabel(commandBuffer);
        }

        return vk->vkEndCommandBuffer(commandBuffer);
    }

    void on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                          VkCommandBuffer boxed_commandBuffer,
                                          const VkDecoderContext& context) {
        on_vkEndCommandBuffer(pool, boxed_commandBuffer, context);
    }

    void on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                            VkCommandBuffer boxed_commandBuffer,
                                            VkCommandBufferResetFlags flags) {
        on_vkResetCommandBuffer(pool, boxed_commandBuffer, flags);
    }

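    // For compute bind points, the bound pipeline, descriptor sets and dynamic offsets are
    // mirrored into CommandBufferInfo; this appears to be so that decoder-injected compute work
    // (e.g. emulated texture decompression) can restore the guest's state afterwards.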
    void on_vkCmdBindPipeline(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
                              VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
        auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
        auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
        vk->vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
        if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
            if (cmdBufferInfo) {
                cmdBufferInfo->computePipeline = pipeline;
            }
        }
    }

    void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool,
                                    VkCommandBuffer boxed_commandBuffer,
                                    VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                    uint32_t firstSet, uint32_t descriptorSetCount,
                                    const VkDescriptorSet* pDescriptorSets,
                                    uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
        auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
        auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
        vk->vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet,
                                    descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
                                    pDynamicOffsets);
        if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
            if (cmdBufferInfo) {
                cmdBufferInfo->descriptorLayout = layout;

                if (descriptorSetCount) {
                    cmdBufferInfo->firstSet = firstSet;
                    cmdBufferInfo->descriptorSets.assign(pDescriptorSets,
                                                         pDescriptorSets + descriptorSetCount);
                    cmdBufferInfo->dynamicOffsets.assign(pDynamicOffsets,
                                                         pDynamicOffsets + dynamicOffsetCount);
                }
            }
        }
    }

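    // When ETC2/ASTC textures are emulated for this device, render pass attachments declared
    // with a compressed format are rewritten to the corresponding decompressed format before
    // the render pass is created, so that they match the decompressed images the decoder
    // actually renders to.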
    VkResult on_vkCreateRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
                                   const VkRenderPassCreateInfo* pCreateInfo,
                                   const VkAllocationCallbacks* pAllocator,
                                   VkRenderPass* pRenderPass) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        VkRenderPassCreateInfo createInfo;
        bool needReformat = false;
        std::lock_guard<std::recursive_mutex> lock(mLock);

        auto* deviceInfo = android::base::find(mDeviceInfo, device);
        if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
        if (deviceInfo->emulateTextureEtc2 || deviceInfo->emulateTextureAstc) {
            for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
                if (deviceInfo->needEmulatedDecompression(pCreateInfo->pAttachments[i].format)) {
                    needReformat = true;
                    break;
                }
            }
        }
        std::vector<VkAttachmentDescription> attachments;
        if (needReformat) {
            createInfo = *pCreateInfo;
            attachments.assign(pCreateInfo->pAttachments,
                               pCreateInfo->pAttachments + pCreateInfo->attachmentCount);
            createInfo.pAttachments = attachments.data();
            for (auto& attachment : attachments) {
                attachment.format = CompressedImageInfo::getDecompressedFormat(attachment.format);
            }
            pCreateInfo = &createInfo;
        }
        VkResult res = vk->vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
        if (res != VK_SUCCESS) {
            return res;
        }

        auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
        renderPassInfo.device = device;

        *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);

        return res;
    }

    VkResult on_vkCreateRenderPass2(android::base::BumpPool* pool, VkDevice boxed_device,
                                    const VkRenderPassCreateInfo2* pCreateInfo,
                                    const VkAllocationCallbacks* pAllocator,
                                    VkRenderPass* pRenderPass) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        std::lock_guard<std::recursive_mutex> lock(mLock);

        VkResult res = vk->vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
        if (res != VK_SUCCESS) {
            return res;
        }

        auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
        renderPassInfo.device = device;

        *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);

        return res;
    }

    void destroyRenderPassLocked(VkDevice device, VulkanDispatch* deviceDispatch,
                                 VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
        deviceDispatch->vkDestroyRenderPass(device, renderPass, pAllocator);

        mRenderPassInfo.erase(renderPass);
    }

    void on_vkDestroyRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
                                VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
        auto device = unbox_VkDevice(boxed_device);
        auto deviceDispatch = dispatch_VkDevice(boxed_device);

        std::lock_guard<std::recursive_mutex> lock(mLock);
        destroyRenderPassLocked(device, deviceDispatch, renderPass, pAllocator);
    }

    void on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
                                      VkCommandBuffer boxed_commandBuffer, VkQueryPool queryPool,
                                      uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                      VkDeviceSize dstOffset, VkDeviceSize stride,
                                      VkQueryResultFlags flags) {
        auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
        auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
        if (queryCount == 1 && stride == 0) {
            // Some drivers don't seem to handle stride==0 very well.
            // In fact, the spec does not say what should happen with stride==0.
            // So we just use the largest stride possible.
            stride = mBufferInfo[dstBuffer].size - dstOffset;
        }
        vk->vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
                                      dstOffset, stride, flags);
    }

    VkResult on_vkCreateFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
                                    const VkFramebufferCreateInfo* pCreateInfo,
                                    const VkAllocationCallbacks* pAllocator,
                                    VkFramebuffer* pFramebuffer) {
        auto device = unbox_VkDevice(boxed_device);
        auto deviceDispatch = dispatch_VkDevice(boxed_device);

        VkResult result =
            deviceDispatch->vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
        if (result != VK_SUCCESS) {
            return result;
        }

        std::lock_guard<std::recursive_mutex> lock(mLock);

        auto& framebufferInfo = mFramebufferInfo[*pFramebuffer];
        framebufferInfo.device = device;

        *pFramebuffer = new_boxed_non_dispatchable_VkFramebuffer(*pFramebuffer);

        return result;
    }

    void destroyFramebufferLocked(VkDevice device, VulkanDispatch* deviceDispatch,
                                  VkFramebuffer framebuffer,
                                  const VkAllocationCallbacks* pAllocator) {
        deviceDispatch->vkDestroyFramebuffer(device, framebuffer, pAllocator);

        mFramebufferInfo.erase(framebuffer);
    }

    void on_vkDestroyFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
                                 VkFramebuffer framebuffer,
                                 const VkAllocationCallbacks* pAllocator) {
        auto device = unbox_VkDevice(boxed_device);
        auto deviceDispatch = dispatch_VkDevice(boxed_device);

        std::lock_guard<std::recursive_mutex> lock(mLock);
        destroyFramebufferLocked(device, deviceDispatch, framebuffer, pAllocator);
    }

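    // In short, each VkBindSparseInfo that carries timeline semaphore values is expanded into
    // three calls: an empty vkQueueSubmit that performs the waits, a vkQueueBindSparse stripped
    // of all semaphores, and an empty vkQueueSubmit that performs the signals (with the caller's
    // fence attached only to the last one).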
    VkResult on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue boxed_queue,
                                  uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
                                  VkFence fence) {
        // If pBindInfo contains VkTimelineSemaphoreSubmitInfo, then it's
        // possible the host driver isn't equipped to deal with them yet.  To
        // work around this, send empty vkQueueSubmits before and after the
        // call to vkQueueBindSparse that contain the right values for
        // wait/signal semaphores and contain the user's
        // VkTimelineSemaphoreSubmitInfo structure, following the *submission
        // order* implied by the indices of pBindInfo.

        // TODO: Detect if we are running on a driver that supports timeline
        // semaphore signal/wait operations in vkQueueBindSparse
        const bool needTimelineSubmitInfoWorkaround = true;
        (void)needTimelineSubmitInfoWorkaround;

        bool hasTimelineSemaphoreSubmitInfo = false;

        for (uint32_t i = 0; i < bindInfoCount; ++i) {
            const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
                vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
            if (tsSi) {
                hasTimelineSemaphoreSubmitInfo = true;
            }
        }

        auto queue = unbox_VkQueue(boxed_queue);
        auto vk = dispatch_VkQueue(boxed_queue);

        if (!hasTimelineSemaphoreSubmitInfo) {
            (void)pool;
            return vk->vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
        } else {
            std::vector<VkPipelineStageFlags> waitDstStageMasks;
            VkTimelineSemaphoreSubmitInfoKHR currTsSi = {
                VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, 0, 0, nullptr, 0, nullptr,
            };

            VkSubmitInfo currSi = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                &currTsSi,
                0,
                nullptr,
                nullptr,
                0,
                nullptr,  // No commands
                0,
                nullptr,
            };

            VkBindSparseInfo currBi;

            VkResult res;

            for (uint32_t i = 0; i < bindInfoCount; ++i) {
                const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
                    vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
                if (!tsSi) {
                    res = vk->vkQueueBindSparse(queue, 1, pBindInfo + i, fence);
                    if (VK_SUCCESS != res) return res;
                    continue;
                }

                currTsSi.waitSemaphoreValueCount = tsSi->waitSemaphoreValueCount;
                currTsSi.pWaitSemaphoreValues = tsSi->pWaitSemaphoreValues;
                currTsSi.signalSemaphoreValueCount = 0;
                currTsSi.pSignalSemaphoreValues = nullptr;

                currSi.waitSemaphoreCount = pBindInfo[i].waitSemaphoreCount;
                currSi.pWaitSemaphores = pBindInfo[i].pWaitSemaphores;
                waitDstStageMasks.resize(pBindInfo[i].waitSemaphoreCount,
                                         VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
                currSi.pWaitDstStageMask = waitDstStageMasks.data();

                currSi.signalSemaphoreCount = 0;
                currSi.pSignalSemaphores = nullptr;

                res = vk->vkQueueSubmit(queue, 1, &currSi, nullptr);
                if (VK_SUCCESS != res) return res;

                currBi = pBindInfo[i];

                vk_struct_chain_remove(tsSi, &currBi);

                currBi.waitSemaphoreCount = 0;
                currBi.pWaitSemaphores = nullptr;
                currBi.signalSemaphoreCount = 0;
                currBi.pSignalSemaphores = nullptr;

                res = vk->vkQueueBindSparse(queue, 1, &currBi, nullptr);
                if (VK_SUCCESS != res) return res;

                currTsSi.waitSemaphoreValueCount = 0;
                currTsSi.pWaitSemaphoreValues = nullptr;
                currTsSi.signalSemaphoreValueCount = tsSi->signalSemaphoreValueCount;
                currTsSi.pSignalSemaphoreValues = tsSi->pSignalSemaphoreValues;

                currSi.waitSemaphoreCount = 0;
                currSi.pWaitSemaphores = nullptr;
                currSi.signalSemaphoreCount = pBindInfo[i].signalSemaphoreCount;
                currSi.pSignalSemaphores = pBindInfo[i].pSignalSemaphores;

                res =
                    vk->vkQueueSubmit(queue, 1, &currSi, i == bindInfoCount - 1 ? fence : nullptr);
                if (VK_SUCCESS != res) return res;
            }

            return VK_SUCCESS;
        }
    }

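    // Derives host linear-image layout parameters for a format by creating a set of throwaway
    // linear images (widths 64..256), recording the reported offset and the minimum row-pitch
    // alignment across them; results are cached per format in mPerFormatLinearImageProperties.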
    void on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
                                         VkFormat format, VkDeviceSize* pOffset,
                                         VkDeviceSize* pRowPitchAlignment) {
        if (mPerFormatLinearImageProperties.find(format) == mPerFormatLinearImageProperties.end()) {
            VkDeviceSize offset = 0u;
            VkDeviceSize rowPitchAlignment = UINT_MAX;

            for (uint32_t width = 64; width <= 256; width++) {
                LinearImageCreateInfo linearImageCreateInfo = {
                    .extent =
                        {
                            .width = width,
                            .height = 64,
                            .depth = 1,
                        },
                    .format = format,
                    .usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
                };

                VkDeviceSize currOffset = 0u;
                VkDeviceSize currRowPitchAlignment = UINT_MAX;

                VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
                on_vkGetLinearImageLayout2GOOGLE(pool, boxed_device, &defaultVkImageCreateInfo,
                                                 &currOffset, &currRowPitchAlignment);

                offset = currOffset;
                rowPitchAlignment = std::min(currRowPitchAlignment, rowPitchAlignment);
            }
            mPerFormatLinearImageProperties[format] = LinearImageProperties{
                .offset = offset,
                .rowPitchAlignment = rowPitchAlignment,
            };
        }

        if (pOffset) {
            *pOffset = mPerFormatLinearImageProperties[format].offset;
        }
        if (pRowPitchAlignment) {
            *pRowPitchAlignment = mPerFormatLinearImageProperties[format].rowPitchAlignment;
        }
    }

    void on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
                                          const VkImageCreateInfo* pCreateInfo,
                                          VkDeviceSize* pOffset, VkDeviceSize* pRowPitchAlignment) {
        LinearImageCreateInfo linearImageCreateInfo = {
            .extent = pCreateInfo->extent,
            .format = pCreateInfo->format,
            .usage = pCreateInfo->usage,
        };
        if (mLinearImageProperties.find(linearImageCreateInfo) == mLinearImageProperties.end()) {
            auto device = unbox_VkDevice(boxed_device);
            auto vk = dispatch_VkDevice(boxed_device);

            VkImageSubresource subresource = {
                .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                .mipLevel = 0,
                .arrayLayer = 0,
            };

            VkImage image;
            VkSubresourceLayout subresourceLayout;

            VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
            VkResult result = vk->vkCreateImage(device, &defaultVkImageCreateInfo, nullptr, &image);
            if (result != VK_SUCCESS) {
                fprintf(stderr, "vkCreateImage failed. size: (%u x %u) result: %d\n",
                        linearImageCreateInfo.extent.width, linearImageCreateInfo.extent.height,
                        result);
                return;
            }
            vk->vkGetImageSubresourceLayout(device, image, &subresource, &subresourceLayout);
            vk->vkDestroyImage(device, image, nullptr);

            VkDeviceSize offset = subresourceLayout.offset;
            uint64_t rowPitch = subresourceLayout.rowPitch;
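            // rowPitch & (~rowPitch + 1) isolates the lowest set bit of rowPitch, i.e. the
            // largest power of two that evenly divides the reported row pitch, which is then
            // handed back to the guest as the row-pitch alignment.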
            VkDeviceSize rowPitchAlignment = rowPitch & (~rowPitch + 1);

            mLinearImageProperties[linearImageCreateInfo] = {
                .offset = offset,
                .rowPitchAlignment = rowPitchAlignment,
            };
        }

        if (pOffset != nullptr) {
            *pOffset = mLinearImageProperties[linearImageCreateInfo].offset;
        }
        if (pRowPitchAlignment != nullptr) {
            *pRowPitchAlignment = mLinearImageProperties[linearImageCreateInfo].rowPitchAlignment;
        }
    }

#include "VkSubDecoder.cpp"

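    // on_vkQueueFlushCommandsGOOGLE replays a batch of command buffer commands that the guest
    // recorded into a shared stream: the sub-decoder included above (VkSubDecoder.cpp) walks the
    // packed commands in pData and issues them directly on the unboxed host command buffer.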
    void on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool, VkQueue queue,
                                       VkCommandBuffer boxed_commandBuffer, VkDeviceSize dataSize,
                                       const void* pData, const VkDecoderContext& context) {
        (void)queue;

        VkCommandBuffer commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
        VulkanDispatch* vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
        VulkanMemReadingStream* readStream = readstream_VkCommandBuffer(boxed_commandBuffer);
        subDecode(readStream, vk, boxed_commandBuffer, commandBuffer, dataSize, pData, context);
    }

    void on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(android::base::BumpPool* pool, VkQueue queue,
                                                    VkCommandBuffer commandBuffer,
                                                    VkDeviceMemory deviceMemory,
                                                    VkDeviceSize dataOffset, VkDeviceSize dataSize,
                                                    const VkDecoderContext& context) {
        // TODO: implement
    }
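
    // Resolves the descriptor set associated with a guest-side pool id. If the guest flagged the
    // id as pending allocation, a fresh set is (re)allocated from the pool and rebound to the
    // boxed handle; otherwise the existing underlying set is returned. *didAlloc tells the caller
    // whether any allocation happened, so it knows the pending writes need their dstSet patched.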
    VkDescriptorSet getOrAllocateDescriptorSetFromPoolAndId(VulkanDispatch* vk, VkDevice device,
                                                            VkDescriptorPool pool,
                                                            VkDescriptorSetLayout setLayout,
                                                            uint64_t poolId, uint32_t pendingAlloc,
                                                            bool* didAlloc) {
        auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
        if (!poolInfo) {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "descriptor pool " << pool << " not found ";
        }

        DispatchableHandleInfo<uint64_t>* setHandleInfo = sBoxedHandleManager.get(poolId);

        if (setHandleInfo->underlying) {
            if (pendingAlloc) {
                VkDescriptorSet allocedSet;
                vk->vkFreeDescriptorSets(device, pool, 1,
                                         (VkDescriptorSet*)(&setHandleInfo->underlying));
                VkDescriptorSetAllocateInfo dsAi = {
                    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
                };
                vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
                setHandleInfo->underlying = (uint64_t)allocedSet;
                initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
                *didAlloc = true;
                return allocedSet;
            } else {
                *didAlloc = false;
                return (VkDescriptorSet)(setHandleInfo->underlying);
            }
        } else {
            if (pendingAlloc) {
                VkDescriptorSet allocedSet;
                VkDescriptorSetAllocateInfo dsAi = {
                    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
                };
                vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
                setHandleInfo->underlying = (uint64_t)allocedSet;
                initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
                *didAlloc = true;
                return allocedSet;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "descriptor pool " << pool << " wanted to get set with id 0x" << std::hex
                    << poolId;
                return nullptr;
            }
        }
    }

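    // Applies a batch of descriptor updates that the guest queued up instead of issuing
    // individual vkUpdateDescriptorSets calls. Each referenced set is first resolved (or
    // allocated) via its pool id; if any allocation occurred, the pending writes are copied and
    // their dstSet handles patched to the freshly allocated sets before being replayed.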
    void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
        android::base::BumpPool* pool, VkQueue boxed_queue, uint32_t descriptorPoolCount,
        const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
        const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
        const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
        const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
        const VkWriteDescriptorSet* pPendingDescriptorWrites) {
        std::lock_guard<std::recursive_mutex> lock(mLock);

        VkDevice device;

        auto queue = unbox_VkQueue(boxed_queue);
        auto vk = dispatch_VkQueue(boxed_queue);

        auto* queueInfo = android::base::find(mQueueInfo, queue);
        if (queueInfo) {
            device = queueInfo->device;
        } else {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "queue " << queue << " (boxed: " << boxed_queue << ") with no device registered";
        }

        std::vector<VkDescriptorSet> setsToUpdate(descriptorSetCount, nullptr);

        bool didAlloc = false;

        for (uint32_t i = 0; i < descriptorSetCount; ++i) {
            uint64_t poolId = pDescriptorSetPoolIds[i];
            uint32_t whichPool = pDescriptorSetWhichPool[i];
            uint32_t pendingAlloc = pDescriptorSetPendingAllocation[i];
            bool didAllocThisTime;
            setsToUpdate[i] = getOrAllocateDescriptorSetFromPoolAndId(
                vk, device, pDescriptorPools[whichPool], pDescriptorSetLayouts[i], poolId,
                pendingAlloc, &didAllocThisTime);

            if (didAllocThisTime) didAlloc = true;
        }

        if (didAlloc) {
            std::vector<VkWriteDescriptorSet> writeDescriptorSetsForHostDriver(
                pendingDescriptorWriteCount);
            memcpy(writeDescriptorSetsForHostDriver.data(), pPendingDescriptorWrites,
                   pendingDescriptorWriteCount * sizeof(VkWriteDescriptorSet));

            for (uint32_t i = 0; i < descriptorSetCount; ++i) {
                uint32_t writeStartIndex = pDescriptorWriteStartingIndices[i];
                uint32_t writeEndIndex;
                if (i == descriptorSetCount - 1) {
                    writeEndIndex = pendingDescriptorWriteCount;
                } else {
                    writeEndIndex = pDescriptorWriteStartingIndices[i + 1];
                }

                for (uint32_t j = writeStartIndex; j < writeEndIndex; ++j) {
                    writeDescriptorSetsForHostDriver[j].dstSet = setsToUpdate[i];
                }
            }
            this->on_vkUpdateDescriptorSetsImpl(
                pool, vk, device, (uint32_t)writeDescriptorSetsForHostDriver.size(),
                writeDescriptorSetsForHostDriver.data(), 0, nullptr);
        } else {
            this->on_vkUpdateDescriptorSetsImpl(pool, vk, device, pendingDescriptorWriteCount,
                                                pPendingDescriptorWrites, 0, nullptr);
        }
    }

    void on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool, VkDevice device,
                                             VkDescriptorPool descriptorPool,
                                             uint32_t* pPoolIdCount, uint64_t* pPoolIds) {
        std::lock_guard<std::recursive_mutex> lock(mLock);
        auto& info = mDescriptorPoolInfo[descriptorPool];
        *pPoolIdCount = (uint32_t)info.poolIds.size();

        if (pPoolIds) {
            for (uint32_t i = 0; i < info.poolIds.size(); ++i) {
                pPoolIds[i] = info.poolIds[i];
            }
        }
    }

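    // When Ycbcr conversion is being emulated on a driver without native support, a fixed dummy
    // boxed handle is returned instead of calling into the driver; the matching destroy path
    // below is a no-op for the same configuration.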
    VkResult on_vkCreateSamplerYcbcrConversion(
        android::base::BumpPool*, VkDevice boxed_device,
        const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
        const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {
        if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
            *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(
                (VkSamplerYcbcrConversion)((uintptr_t)0xffff0000ull));
            return VK_SUCCESS;
        }
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        VkResult res =
            vk->vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
        if (res != VK_SUCCESS) {
            return res;
        }
        *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(*pYcbcrConversion);
        return VK_SUCCESS;
    }

    void on_vkDestroySamplerYcbcrConversion(android::base::BumpPool* pool, VkDevice boxed_device,
                                            VkSamplerYcbcrConversion ycbcrConversion,
                                            const VkAllocationCallbacks* pAllocator) {
        if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
            return;
        }
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        vk->vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
        return;
    }

    void on_DeviceLost() { GFXSTREAM_ABORT(FatalError(VK_ERROR_DEVICE_LOST)); }

    void DeviceLostHandler() {}

    void on_CheckOutOfMemory(VkResult result, uint32_t opCode, const VkDecoderContext& context,
                             std::optional<uint64_t> allocationSize = std::nullopt) {
        if (result == VK_ERROR_OUT_OF_HOST_MEMORY || result == VK_ERROR_OUT_OF_DEVICE_MEMORY ||
            result == VK_ERROR_OUT_OF_POOL_MEMORY) {
            context.metricsLogger->logMetricEvent(
                MetricEventVulkanOutOfMemory{.vkResultCode = result,
                                             .opCode = std::make_optional(opCode),
                                             .allocationSize = allocationSize});
        }
    }

    VkResult waitForFence(VkFence boxed_fence, uint64_t timeout) {
        VkFence fence;
        VkDevice device;
        VulkanDispatch* vk;
        StaticLock* fenceLock;
        ConditionVariable* cv;
        {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            fence = unbox_VkFence(boxed_fence);
            if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
                // No fence, could be a semaphore.
                // TODO: Async wait for semaphores
                return VK_SUCCESS;
            }

            // The Vulkan spec requires fences of vkQueueSubmit to be *externally
            // synchronized*, i.e. we cannot submit a queue while waiting for the
            // fence in another thread. For threads that call this function, they
            // have to wait until a vkQueueSubmit() using this fence is called
            // before calling vkWaitForFences(). So we use a condition variable
            // and mutex for thread synchronization.
            //
            // See:
            // https://www.khronos.org/registry/vulkan/specs/1.2/html/vkspec.html#fundamentals-threadingbehavior
            // https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/issues/519

            device = mFenceInfo[fence].device;
            vk = mFenceInfo[fence].vk;
            fenceLock = &mFenceInfo[fence].lock;
            cv = &mFenceInfo[fence].cv;
        }

        fenceLock->lock();
        cv->wait(fenceLock, [this, fence] {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            if (mFenceInfo[fence].state == FenceInfo::State::kWaitable) {
                mFenceInfo[fence].state = FenceInfo::State::kWaiting;
                return true;
            }
            return false;
        });
        fenceLock->unlock();

        {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            if (mFenceInfo.find(fence) == mFenceInfo.end()) {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Fence was destroyed before vkWaitForFences call.";
            }
        }

        return vk->vkWaitForFences(device, /* fenceCount */ 1u, &fence,
                                   /* waitAll */ false, timeout);
    }

    VkResult getFenceStatus(VkFence boxed_fence) {
        VkDevice device;
        VkFence fence;
        VulkanDispatch* vk;
        {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            fence = unbox_VkFence(boxed_fence);
            if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
                // No fence, could be a semaphore.
                // TODO: Async get status for semaphores
                return VK_SUCCESS;
            }

            device = mFenceInfo[fence].device;
            vk = mFenceInfo[fence].vk;
        }

        return vk->vkGetFenceStatus(device, fence);
    }

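    // Registers a callback to run when the image is next presented via the Android native buffer
    // QSRI path (queue signal release image, i.e. vkQueueSignalReleaseImageANDROID). Fails
    // without scheduling the callback if the image has no associated AndroidNativeBufferInfo or
    // the info is stale.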
    AsyncResult registerQsriCallback(VkImage boxed_image, VkQsriTimeline::Callback callback) {
        VkImage image;
        std::shared_ptr<AndroidNativeBufferInfo> anbInfo;
        {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            image = unbox_VkImage(boxed_image);

            if (mLogging) {
                fprintf(stderr, "%s: for boxed image 0x%llx image %p\n", __func__,
                        (unsigned long long)boxed_image, image);
            }

            if (image == VK_NULL_HANDLE || mImageInfo.find(image) == mImageInfo.end()) {
                // No image
                return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
            }

            anbInfo = mImageInfo[image].anbInfo;  // shared ptr, take ref
        }

        if (!anbInfo) {
            fprintf(stderr, "%s: warning: image %p doesn't have anb info\n", __func__, image);
            return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
        }
        if (!anbInfo->vk) {
            fprintf(stderr, "%s:%p warning: image %p anb info not initialized\n", __func__,
                    anbInfo.get(), image);
            return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
        }
        // Could be null or mismatched image, check later
        if (image != anbInfo->image) {
            fprintf(stderr, "%s:%p warning: image %p anb info has wrong image: %p\n", __func__,
                    anbInfo.get(), image, anbInfo->image);
            return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
        }

        anbInfo->qsriTimeline->registerCallbackForNextPresentAndPoll(std::move(callback));

        if (mLogging) {
            fprintf(stderr, "%s:%p Done registering\n", __func__, anbInfo.get());
        }
        return AsyncResult::OK_AND_CALLBACK_SCHEDULED;
    }

#define GUEST_EXTERNAL_MEMORY_HANDLE_TYPES                                \
    (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID | \
     VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)

    // Transforms
    // If adding a new transform here, please check if it needs to be used in VkDecoderTestDispatch

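    // The tohost/fromhost transforms below translate external-memory handle types and properties
    // between what the host driver actually exposes and the handle types the guest is allowed to
    // see (GUEST_EXTERNAL_MEMORY_HANDLE_TYPES).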
transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties * props,uint32_t count)5069     void transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties* props,
5070                                                          uint32_t count) {
5071         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
5072         for (uint32_t i = 0; i < count; ++i) {
5073             mut[i] = transformExternalMemoryProperties_tohost(mut[i]);
5074         }
5075     }
transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties * props,uint32_t count)5076     void transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties* props,
5077                                                            uint32_t count) {
5078         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
5079         for (uint32_t i = 0; i < count; ++i) {
5080             mut[i] = transformExternalMemoryProperties_fromhost(mut[i],
5081                                                                 GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);
5082         }
5083     }
5084 
transformImpl_VkImageCreateInfo_tohost(const VkImageCreateInfo * pImageCreateInfos,uint32_t count)5085     void transformImpl_VkImageCreateInfo_tohost(const VkImageCreateInfo* pImageCreateInfos,
5086                                                 uint32_t count) {
5087         for (uint32_t i = 0; i < count; i++) {
5088             VkImageCreateInfo& imageCreateInfo =
5089                 const_cast<VkImageCreateInfo&>(pImageCreateInfos[i]);
5090             const VkExternalMemoryImageCreateInfo* pExternalMemoryImageCi =
5091                 vk_find_struct<VkExternalMemoryImageCreateInfo>(&imageCreateInfo);
5092             bool importAndroidHardwareBuffer =
5093                 pExternalMemoryImageCi &&
5094                 (pExternalMemoryImageCi->handleTypes &
5095                  VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID);
5096             const VkNativeBufferANDROID* pNativeBufferANDROID =
5097                 vk_find_struct<VkNativeBufferANDROID>(&imageCreateInfo);
5098 
5099             // If the VkImage is going to bind to a ColorBuffer, we have to make sure the VkImage
5100             // that backs the ColorBuffer is created with identical parameters. From the spec: If
5101             // two aliases are both images that were created with identical creation parameters,
5102             // both were created with the VK_IMAGE_CREATE_ALIAS_BIT flag set, and both are bound
5103             // identically to memory except for VkBindImageMemoryDeviceGroupInfo::pDeviceIndices and
5104             // VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions, then they interpret the
5105             // contents of the memory in consistent ways, and data written to one alias can be read
5106             // by the other alias. ... Aliases created by binding the same memory to resources in
5107             // multiple Vulkan instances or external APIs using external memory handle export and
5108             // import mechanisms interpret the contents of the memory in consistent ways, and data
5109             // written to one alias can be read by the other alias. Otherwise, the aliases interpret
5110             // the contents of the memory differently, ...
5111             std::unique_ptr<VkImageCreateInfo> colorBufferVkImageCi = nullptr;
5112             std::string importSource;
5113             VkFormat resolvedFormat = VK_FORMAT_UNDEFINED;
5114             // Use UNORM formats for SRGB format requests.
5115             switch (imageCreateInfo.format) {
5116                 case VK_FORMAT_R8G8B8A8_SRGB:
5117                     resolvedFormat = VK_FORMAT_R8G8B8A8_UNORM;
5118                     break;
5119                 case VK_FORMAT_R8G8B8_SRGB:
5120                     resolvedFormat = VK_FORMAT_R8G8B8_UNORM;
5121                     break;
5122                 case VK_FORMAT_B8G8R8A8_SRGB:
5123                     resolvedFormat = VK_FORMAT_B8G8R8A8_UNORM;
5124                     break;
5125                 case VK_FORMAT_R8_SRGB:
5126                     resolvedFormat = VK_FORMAT_R8_UNORM;
5127                     break;
5128                 default:
5129                     resolvedFormat = imageCreateInfo.format;
5130             }
5131             if (importAndroidHardwareBuffer) {
5132                 // For AHardwareBufferImage binding, we can't know which ColorBuffer this
5133                 // to-be-created VkImage will bind to, so we try our best to infer the creation
5134                 // parameters.
5135                 colorBufferVkImageCi = generateColorBufferVkImageCreateInfo(
5136                     resolvedFormat, imageCreateInfo.extent.width, imageCreateInfo.extent.height,
5137                     imageCreateInfo.tiling);
5138                 importSource = "AHardwareBuffer";
5139             } else if (pNativeBufferANDROID) {
5140                 // For native buffer binding, we can query the creation parameters from handle.
5141                 auto colorBufferInfo = getColorBufferInfo(*pNativeBufferANDROID->handle);
5142                 if (colorBufferInfo.handle == *pNativeBufferANDROID->handle) {
5143                     colorBufferVkImageCi =
5144                         std::make_unique<VkImageCreateInfo>(colorBufferInfo.imageCreateInfoShallow);
5145                 } else {
5146                     ERR("Unknown ColorBuffer handle: %" PRIu32 ".", *pNativeBufferANDROID->handle);
5147                 }
5148                 importSource = "NativeBufferANDROID";
5149             }
5150             if (!colorBufferVkImageCi) {
5151                 continue;
5152             }
5153             imageCreateInfo.format = resolvedFormat;
5154             if (imageCreateInfo.flags & (~colorBufferVkImageCi->flags)) {
5155                 ERR("The VkImageCreateInfo to import %s contains unsupported VkImageCreateFlags. "
5156                     "All supported VkImageCreateFlags are %s, the input VkImageCreateInfo requires "
5157                     "support for %s.",
5158                     importSource.c_str(),
5159                     string_VkImageCreateFlags(colorBufferVkImageCi->flags).c_str(),
5160                     string_VkImageCreateFlags(imageCreateInfo.flags).c_str());
5161             }
5162             imageCreateInfo.flags |= colorBufferVkImageCi->flags;
5163             if (imageCreateInfo.imageType != colorBufferVkImageCi->imageType) {
5164                 ERR("The VkImageCreateInfo to import %s has an unexpected VkImageType: %s, %s "
5165                     "expected.",
5166                     importSource.c_str(), string_VkImageType(imageCreateInfo.imageType),
5167                     string_VkImageType(colorBufferVkImageCi->imageType));
5168             }
5169             if (imageCreateInfo.extent.depth != colorBufferVkImageCi->extent.depth) {
5170                 ERR("The VkImageCreateInfo to import %s has an unexpected VkExtent::depth: %" PRIu32
5171                     ", %" PRIu32 " expected.",
5172                     importSource.c_str(), imageCreateInfo.extent.depth,
5173                     colorBufferVkImageCi->extent.depth);
5174             }
5175             if (imageCreateInfo.mipLevels != colorBufferVkImageCi->mipLevels) {
5176                 ERR("The VkImageCreateInfo to import %s has an unexpected mipLevels: %" PRIu32
5177                     ", %" PRIu32 " expected.",
5178                     importSource.c_str(), imageCreateInfo.mipLevels,
5179                     colorBufferVkImageCi->mipLevels);
5180             }
5181             if (imageCreateInfo.arrayLayers != colorBufferVkImageCi->arrayLayers) {
5182                 ERR("The VkImageCreateInfo to import %s has an unexpected arrayLayers: %" PRIu32
5183                     ", %" PRIu32 " expected.",
5184                     importSource.c_str(), imageCreateInfo.arrayLayers,
5185                     colorBufferVkImageCi->arrayLayers);
5186             }
5187             if (imageCreateInfo.samples != colorBufferVkImageCi->samples) {
5188                 ERR("The VkImageCreateInfo to import %s has an unexpected VkSampleCountFlagBits: "
5189                     "%s, %s expected.",
5190                     importSource.c_str(), string_VkSampleCountFlagBits(imageCreateInfo.samples),
5191                     string_VkSampleCountFlagBits(colorBufferVkImageCi->samples));
5192             }
5193             if (imageCreateInfo.usage & (~colorBufferVkImageCi->usage)) {
5194                 ERR("The VkImageCreateInfo to import %s contains unsupported VkImageUsageFlags. "
5195                     "All supported VkImageUsageFlags are %s, the input VkImageCreateInfo requires "
5196                     "support for %s.",
5197                     importSource.c_str(),
5198                     string_VkImageUsageFlags(colorBufferVkImageCi->usage).c_str(),
5199                     string_VkImageUsageFlags(imageCreateInfo.usage).c_str());
5200             }
5201             imageCreateInfo.usage |= colorBufferVkImageCi->usage;
5202             // For the AndroidHardwareBuffer binding case VkImageCreateInfo::sharingMode isn't
5203             // filled in generateColorBufferVkImageCreateInfo, and
5204             // VkImageCreateInfo::{format,extent::{width, height}, tiling} are guaranteed to match.
5205             if (importAndroidHardwareBuffer) {
5206                 continue;
5207             }
5208             if (resolvedFormat != colorBufferVkImageCi->format) {
5209                 ERR("The VkImageCreateInfo to import %s contains unexpected VkFormat: %s. %s "
5210                     "expected.",
5211                     importSource.c_str(), string_VkFormat(imageCreateInfo.format),
5212                     string_VkFormat(colorBufferVkImageCi->format));
5213             }
5214             if (imageCreateInfo.extent.width != colorBufferVkImageCi->extent.width) {
5215                 ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::width: "
5216                     "%" PRIu32 ". %" PRIu32 " expected.",
5217                     importSource.c_str(), imageCreateInfo.extent.width,
5218                     colorBufferVkImageCi->extent.width);
5219             }
5220             if (imageCreateInfo.extent.height != colorBufferVkImageCi->extent.height) {
5221                 ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::height: "
5222                     "%" PRIu32 ". %" PRIu32 " expected.",
5223                     importSource.c_str(), imageCreateInfo.extent.height,
5224                     colorBufferVkImageCi->extent.height);
5225             }
5226             if (imageCreateInfo.tiling != colorBufferVkImageCi->tiling) {
5227                 ERR("The VkImageCreateInfo to import %s contains unexpected VkImageTiling: %s. %s "
5228                     "expected.",
5229                     importSource.c_str(), string_VkImageTiling(imageCreateInfo.tiling),
5230                     string_VkImageTiling(colorBufferVkImageCi->tiling));
5231             }
5232             if (imageCreateInfo.sharingMode != colorBufferVkImageCi->sharingMode) {
5233                 ERR("The VkImageCreateInfo to import %s contains unexpected VkSharingMode: %s. %s "
5234                     "expected.",
5235                     importSource.c_str(), string_VkSharingMode(imageCreateInfo.sharingMode),
5236                     string_VkSharingMode(colorBufferVkImageCi->sharingMode));
5237             }
5238         }
5239     }
5240 
    void transformImpl_VkImageCreateInfo_fromhost(const VkImageCreateInfo*, uint32_t) {
5242         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Not yet implemented.";
5243     }
5244 
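    // The following macros generate the tohost/fromhost transforms for structs carrying external
    // memory handle type bits: tohost rewrites the guest-advertised handle types into the ones
    // the host driver actually uses, and fromhost maps them back to
    // GUEST_EXTERNAL_MEMORY_HANDLE_TYPES before results are returned to the guest.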
5245 #define DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(type, field)                                         \
5246     void transformImpl_##type##_tohost(const type* props, uint32_t count) {                        \
5247         type* mut = (type*)props;                                                                  \
5248         for (uint32_t i = 0; i < count; ++i) {                                                     \
5249             mut[i].field =                                                                         \
5250                 (VkExternalMemoryHandleTypeFlagBits)transformExternalMemoryHandleTypeFlags_tohost( \
5251                     mut[i].field);                                                                 \
5252         }                                                                                          \
5253     }                                                                                              \
5254     void transformImpl_##type##_fromhost(const type* props, uint32_t count) {                      \
5255         type* mut = (type*)props;                                                                  \
5256         for (uint32_t i = 0; i < count; ++i) {                                                     \
5257             mut[i].field = (VkExternalMemoryHandleTypeFlagBits)                                    \
5258                 transformExternalMemoryHandleTypeFlags_fromhost(                                   \
5259                     mut[i].field, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);                             \
5260         }                                                                                          \
5261     }
5262 
5263 #define DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(type)                                  \
5264     void transformImpl_##type##_tohost(const type* props, uint32_t count) {                \
5265         type* mut = (type*)props;                                                          \
5266         for (uint32_t i = 0; i < count; ++i) {                                             \
5267             mut[i].externalMemoryProperties =                                              \
5268                 transformExternalMemoryProperties_tohost(mut[i].externalMemoryProperties); \
5269         }                                                                                  \
5270     }                                                                                      \
5271     void transformImpl_##type##_fromhost(const type* props, uint32_t count) {              \
5272         type* mut = (type*)props;                                                          \
5273         for (uint32_t i = 0; i < count; ++i) {                                             \
5274             mut[i].externalMemoryProperties = transformExternalMemoryProperties_fromhost(  \
5275                 mut[i].externalMemoryProperties, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);      \
5276         }                                                                                  \
5277     }
5278 
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalImageFormatInfo, handleType)
5280     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalBufferInfo, handleType)
5281     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryImageCreateInfo, handleTypes)
5282     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryBufferCreateInfo, handleTypes)
5283     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExportMemoryAllocateInfo, handleTypes)
5284     DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalImageFormatProperties)
5285     DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalBufferProperties)
5286 
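    // Allocates a boxed (guest-visible) handle for a new Vulkan object. During snapshot load,
    // handle values recorded in mCreatedHandlesForSnapshotLoad are replayed via addFixed() so the
    // guest keeps seeing the same handle values; otherwise a fresh handle is allocated from
    // sBoxedHandleManager.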
5287     uint64_t newGlobalHandle(const DispatchableHandleInfo<uint64_t>& item,
5288                              BoxedHandleTypeTag typeTag) {
5289         if (!mCreatedHandlesForSnapshotLoad.empty() &&
5290             (mCreatedHandlesForSnapshotLoad.size() - mCreatedHandlesForSnapshotLoadIndex > 0)) {
5291             auto handle = mCreatedHandlesForSnapshotLoad[mCreatedHandlesForSnapshotLoadIndex];
5292             VKDGS_LOG("use handle: %p", handle);
5293             ++mCreatedHandlesForSnapshotLoadIndex;
5294             auto res = sBoxedHandleManager.addFixed(handle, item, typeTag);
5295             return res;
5296         } else {
5297             return sBoxedHandleManager.add(item, typeTag);
5298         }
5299     }
5300 
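    // For each dispatchable handle type, this macro generates the new_boxed_<type>,
    // delete_<type>, unbox_<type>, unboxed_to_boxed_<type> and dispatch_<type> helpers. A boxed
    // dispatchable handle bundles the underlying driver handle with its dispatch table, ordering
    // info, and an optional read stream, all tracked in sBoxedHandleManager.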
5301 #define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL(type)                                           \
5302     type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) {          \
5303         DispatchableHandleInfo<uint64_t> item;                                                    \
5304         item.underlying = (uint64_t)underlying;                                                   \
5305         item.dispatch = dispatch ? dispatch : new VulkanDispatch;                                 \
5306         item.ownDispatch = ownDispatch;                                                           \
5307         item.ordMaintInfo = new OrderMaintenanceInfo;                                             \
5308         item.readStream = nullptr;                                                                \
5309         auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
5310         return res;                                                                               \
5311     }                                                                                             \
5312     void delete_##type(type boxed) {                                                              \
5313         if (!boxed) return;                                                                       \
5314         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5315         if (!elt) return;                                                                         \
5316         releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
5317         if (elt->readStream) {                                                                    \
5318             sReadStreamRegistry.push(elt->readStream);                                            \
5319             elt->readStream = nullptr;                                                            \
5320         }                                                                                         \
5321         sBoxedHandleManager.remove((uint64_t)boxed);                                              \
5322     }                                                                                             \
5323     type unbox_##type(type boxed) {                                                               \
5324         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5325         if (!elt) return VK_NULL_HANDLE;                                                          \
5326         return (type)elt->underlying;                                                             \
5327     }                                                                                             \
5328     OrderMaintenanceInfo* ordmaint_##type(type boxed) {                                           \
5329         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5330         if (!elt) return 0;                                                                       \
5331         auto info = elt->ordMaintInfo;                                                            \
5332         if (!info) return 0;                                                                      \
5333         acquireOrderMaintInfo(info);                                                              \
5334         return info;                                                                              \
5335     }                                                                                             \
5336     VulkanMemReadingStream* readstream_##type(type boxed) {                                       \
5337         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5338         if (!elt) return 0;                                                                       \
5339         auto stream = elt->readStream;                                                            \
5340         if (!stream) {                                                                            \
5341             stream = sReadStreamRegistry.pop();                                                   \
5342             elt->readStream = stream;                                                             \
5343         }                                                                                         \
5344         return stream;                                                                            \
5345     }                                                                                             \
5346     type unboxed_to_boxed_##type(type unboxed) {                                                  \
5347         AutoLock lock(sBoxedHandleManager.lock);                                                  \
5348         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
5349     }                                                                                             \
5350     VulkanDispatch* dispatch_##type(type boxed) {                                                 \
5351         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5352         if (!elt) {                                                                               \
5353             fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed);                     \
5354             return nullptr;                                                                       \
5355         }                                                                                         \
5356         return elt->dispatch;                                                                     \
5357     }
5358 
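    // Non-dispatchable handles are boxed the same way but carry no dispatch table. In addition to
    // delete_<type>, delayed_delete_<type> defers removal until the provided callback runs for
    // the given device, and unbox_<type> aborts on unknown handles, except for VkFence which
    // simply unboxes to VK_NULL_HANDLE.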
5359 #define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL(type)                                       \
5360     type new_boxed_non_dispatchable_##type(type underlying) {                                     \
5361         DispatchableHandleInfo<uint64_t> item;                                                    \
5362         item.underlying = (uint64_t)underlying;                                                   \
5363         auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
5364         return res;                                                                               \
5365     }                                                                                             \
5366     void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
5367         sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
5368     }                                                                                             \
5369     void delete_##type(type boxed) { sBoxedHandleManager.remove((uint64_t)boxed); }               \
5370     void set_boxed_non_dispatchable_##type(type boxed, type underlying) {                         \
5371         DispatchableHandleInfo<uint64_t> item;                                                    \
5372         item.underlying = (uint64_t)underlying;                                                   \
5373         sBoxedHandleManager.addFixed((uint64_t)boxed, item, Tag_##type);                          \
5374     }                                                                                             \
5375     type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
5376         AutoLock lock(sBoxedHandleManager.lock);                                                  \
5377         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
5378     }                                                                                             \
5379     type unbox_##type(type boxed) {                                                               \
5380         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
5381         if (!elt) {                                                                               \
5382             if constexpr (!std::is_same_v<type, VkFence>) {                                       \
5383                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                   \
5384                     << "Unbox " << boxed << " failed, not found.";                                \
5385             }                                                                                     \
5386             return VK_NULL_HANDLE;                                                                \
5387         }                                                                                         \
5388         return (type)elt->underlying;                                                             \
5389     }
5390 
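    // Illustrative usage of the generated helpers (assuming VkQueue is among the dispatchable
    // types listed below; the names come from the macros above):
    //   VkQueue boxedQueue = new_boxed_VkQueue(queue, vk, false /* ownDispatch */);
    //   VkQueue driverQueue = unbox_VkQueue(boxedQueue);
    //   VulkanDispatch* dispatch = dispatch_VkQueue(boxedQueue);
    //   delete_VkQueue(boxedQueue);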
5391     GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL)
    GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL)
5393 
5394     VkDecoderSnapshot* snapshot() { return &mSnapshot; }
5395 
5396    private:
    bool isEmulatedInstanceExtension(const char* name) const {
5398         for (auto emulatedExt : kEmulatedInstanceExtensions) {
5399             if (!strcmp(emulatedExt, name)) return true;
5400         }
5401         return false;
5402     }
5403 
    bool isEmulatedDeviceExtension(const char* name) const {
5405         for (auto emulatedExt : kEmulatedDeviceExtensions) {
5406             if (!strcmp(emulatedExt, name)) return true;
5407         }
5408         return false;
5409     }
5410 
    bool supportEmulatedCompressedImageFormatProperty(VkFormat compressedFormat, VkImageType type,
5412                                                       VkImageTiling tiling, VkImageUsageFlags usage,
5413                                                       VkImageCreateFlags flags) {
5414         // BUG: 139193497
5415         return !(usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(type == VK_IMAGE_TYPE_1D);
5416     }
5417 
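    // Builds the device extension list actually passed to the host driver: extensions that are
    // emulated by the decoder are dropped, and the external memory / external semaphore /
    // samplerYcbcrConversion extensions (plus their platform-specific variants) are re-added when
    // the physical device reports support for them.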
    std::vector<const char*> filteredDeviceExtensionNames(VulkanDispatch* vk,
5419                                                           VkPhysicalDevice physicalDevice,
5420                                                           uint32_t count,
5421                                                           const char* const* extNames) {
5422         std::vector<const char*> res;
5423         std::vector<VkExtensionProperties> properties;
5424         VkResult result;
5425 
5426         for (uint32_t i = 0; i < count; ++i) {
5427             auto extName = extNames[i];
5428             if (!isEmulatedDeviceExtension(extName)) {
5429                 res.push_back(extName);
5430                 continue;
5431             }
5432         }
5433 
5434         result = enumerateDeviceExtensionProperties(vk, physicalDevice, nullptr, properties);
5435         if (result != VK_SUCCESS) {
5436             VKDGS_LOG("failed to enumerate device extensions");
5437             return res;
5438         }
5439 
5440         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
5441             res.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
5442         }
5443 
5444         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME)) {
5445             res.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
5446         }
5447 
5448         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME)) {
5449             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
5450         }
5451 
5452         if (hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
5453             res.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
5454         }
5455 
5456 #ifdef _WIN32
5457         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME)) {
5458             res.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
5459         }
5460 
5461         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME)) {
5462             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
5463         }
5464 #elif __unix__
5465         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)) {
5466             res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
5467         }
5468 
5469         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
5470             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
5471         }
5472 #endif
5473 
5474 #ifdef __linux__
5475         // A dma-buf is a Linux kernel construct, commonly used with open-source DRM drivers.
5476         // See https://docs.kernel.org/driver-api/dma-buf.html for details.
5477         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
5478             res.push_back(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME);
5479         }
5480 #endif
5481         return res;
5482     }
5483 
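    // Same idea at instance level: emulated instance extensions are dropped, and the external
    // memory / semaphore capabilities and debug utils extensions are appended when the
    // VkEmulation support flags say they are available.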
    std::vector<const char*> filteredInstanceExtensionNames(uint32_t count,
5485                                                             const char* const* extNames) {
5486         std::vector<const char*> res;
5487         for (uint32_t i = 0; i < count; ++i) {
5488             auto extName = extNames[i];
5489             if (!isEmulatedInstanceExtension(extName)) {
5490                 res.push_back(extName);
5491             }
5492         }
5493 
5494         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
5495             res.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
5496         }
5497 
5498         if (m_emu->instanceSupportsExternalSemaphoreCapabilities) {
5499             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
5500         }
5501 
5502         if (m_emu->debugUtilsAvailableAndRequested) {
5503             res.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
5504         }
5505 
5506         return res;
5507     }
5508 
    VkPhysicalDeviceMemoryProperties* memPropsOfDeviceLocked(VkDevice device) {
5510         auto* physdev = android::base::find(mDeviceToPhysicalDevice, device);
5511         if (!physdev) return nullptr;
5512 
5513         auto* physdevInfo = android::base::find(mPhysdevInfo, *physdev);
5514         if (!physdevInfo) return nullptr;
5515 
5516         return &physdevInfo->memoryProperties;
5517     }
5518 
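    // Picks a default queue for internal operations on the device: a queue from family 0 if the
    // device created any there, otherwise the first queue found in any family. Also returns the
    // per-queue Lock tracked in mQueueInfo for the chosen queue.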
    bool getDefaultQueueForDeviceLocked(VkDevice device, VkQueue* queue, uint32_t* queueFamilyIndex,
5520                                         Lock** queueLock) {
5521         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5522         if (!deviceInfo) return false;
5523 
5524         auto zeroIt = deviceInfo->queues.find(0);
5525         if (zeroIt == deviceInfo->queues.end() || zeroIt->second.empty()) {
5526             // Get the first queue / queueFamilyIndex
5527             // that does show up.
5528             for (const auto& it : deviceInfo->queues) {
5529                 auto index = it.first;
5530                 for (auto& deviceQueue : it.second) {
5531                     *queue = deviceQueue;
5532                     *queueFamilyIndex = index;
5533                     *queueLock = mQueueInfo.at(deviceQueue).lock;
5534                     return true;
5535                 }
5536             }
5537             // Didn't find anything, fail.
5538             return false;
5539         } else {
5540             // Use queue family index 0.
5541             *queue = zeroIt->second[0];
5542             *queueFamilyIndex = 0;
5543             *queueLock = mQueueInfo.at(zeroIt->second[0]).lock;
5544             return true;
5545         }
5546 
5547         return false;
5548     }
5549 
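    // For images that go through emulated ETC2/ASTC decompression, grow the reported
    // VkMemoryRequirements so the additional storage needed by the decompressed image fits in the
    // same allocation (sizes are added, alignment takes the max of the two).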
    void updateImageMemorySizeLocked(VkDevice device, VkImage image,
5551                                      VkMemoryRequirements* pMemoryRequirements) {
5552         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5553         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
5554             return;
5555         }
5556         auto* imageInfo = android::base::find(mImageInfo, image);
5557         if (!imageInfo) return;
5558         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
5559         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
5560             return;
5561         }
5562         VkMemoryRequirements cmpReq = cmpInfo.getMemoryRequirements();
5563         pMemoryRequirements->alignment = std::max(pMemoryRequirements->alignment, cmpReq.alignment);
5564         pMemoryRequirements->size += cmpReq.size;
5565     }
5566 
5567     // Whether the VkInstance associated with this physical device was created by ANGLE
    bool isAngleInstance(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
5569         std::lock_guard<std::recursive_mutex> lock(mLock);
5570         VkInstance* instance = android::base::find(mPhysicalDeviceToInstance, physicalDevice);
5571         if (!instance) return false;
5572         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
5573         if (!instanceInfo) return false;
5574         return instanceInfo->isAngle;
5575     }
5576 
    bool enableEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
5578         if (!m_emu->enableEtc2Emulation) return false;
5579 
5580         // Don't enable ETC2 emulation for ANGLE, let it do its own emulation.
5581         return !isAngleInstance(physicalDevice, vk);
5582     }
5583 
    bool enableEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
5585         if (m_emu->astcLdrEmulationMode == AstcEmulationMode::Disabled) {
5586             return false;
5587         }
5588 
5589         // Don't enable ASTC emulation for ANGLE, let it do its own emulation.
5590         return !isAngleInstance(physicalDevice, vk);
5591     }
5592 
    bool needEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
5594         if (!enableEmulatedEtc2(physicalDevice, vk)) {
5595             return false;
5596         }
5597         VkPhysicalDeviceFeatures feature;
5598         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
5599         return !feature.textureCompressionETC2;
5600     }
5601 
    bool needEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
5603         if (!enableEmulatedAstc(physicalDevice, vk)) {
5604             return false;
5605         }
5606         VkPhysicalDeviceFeatures feature;
5607         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
5608         return !feature.textureCompressionASTC_LDR;
5609     }
5610 
    bool isEmulatedCompressedTexture(VkFormat format, VkPhysicalDevice physicalDevice,
5612                                      VulkanDispatch* vk) {
5613         return (CompressedImageInfo::isEtc2(format) && needEmulatedEtc2(physicalDevice, vk)) ||
5614                (CompressedImageInfo::isAstc(format) && needEmulatedAstc(physicalDevice, vk));
5615     }
5616 
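    // Format features the emulated compressed-texture path reports to the guest: transfer
    // src/dst, blit src and sampling; optimal tiling additionally gets linear filtering.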
5617     static const VkFormatFeatureFlags kEmulatedTextureBufferFeatureMask =
5618         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
5619         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
5620 
5621     static const VkFormatFeatureFlags kEmulatedTextureOptimalTilingMask =
5622         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
5623         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT |
5624         VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
5625 
    void maskFormatPropertiesForEmulatedTextures(VkFormatProperties* pFormatProp) {
5627         pFormatProp->linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
5628         pFormatProp->optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
5629         pFormatProp->bufferFeatures &= kEmulatedTextureBufferFeatureMask;
5630     }
5631 
    void maskFormatPropertiesForEmulatedTextures(VkFormatProperties2* pFormatProp) {
5633         pFormatProp->formatProperties.linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
5634         pFormatProp->formatProperties.optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
5635         pFormatProp->formatProperties.bufferFeatures &= kEmulatedTextureBufferFeatureMask;
5636     }
5637 
    void maskImageFormatPropertiesForEmulatedTextures(VkImageFormatProperties* pProperties) {
5639         // dEQP-VK.api.info.image_format_properties.2d.optimal#etc2_r8g8b8_unorm_block
5640         pProperties->sampleCounts &= VK_SAMPLE_COUNT_1_BIT;
5641     }
5642 
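    // Shared helper for the format-properties queries (VkFormatProperties and
    // VkFormatProperties2): when the queried format is a compressed format we emulate, query the
    // properties of the decompressed format instead and mask the result down to the feature bits
    // above.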
5643     template <class VkFormatProperties1or2>
    void getPhysicalDeviceFormatPropertiesCore(
5645         std::function<void(VkPhysicalDevice, VkFormat, VkFormatProperties1or2*)>
5646             getPhysicalDeviceFormatPropertiesFunc,
5647         VulkanDispatch* vk, VkPhysicalDevice physicalDevice, VkFormat format,
5648         VkFormatProperties1or2* pFormatProperties) {
5649         if (isEmulatedCompressedTexture(format, physicalDevice, vk)) {
5650             getPhysicalDeviceFormatPropertiesFunc(
5651                 physicalDevice, CompressedImageInfo::getDecompressedFormat(format),
5652                 pFormatProperties);
5653             maskFormatPropertiesForEmulatedTextures(pFormatProperties);
5654             return;
5655         }
5656         getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format, pFormatProperties);
5657     }
5658 
    void executePreprocessRecursive(int level, VkCommandBuffer cmdBuffer) {
5660         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, cmdBuffer);
5661         if (!cmdBufferInfo) return;
5662         for (const auto& func : cmdBufferInfo->preprocessFuncs) {
5663             func();
5664         }
5665         // TODO: fix
5666         // for (const auto& subCmd : cmdBufferInfo->subCmds) {
5667         // executePreprocessRecursive(level + 1, subCmd);
5668         // }
5669     }
5670 
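    // Helpers that scan a handle->info map and collect every object whose tracked info belongs to
    // the given VkDevice, optionally paired with one member of its info struct. Used below when
    // tearing down an instance.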
5671     template <typename VkHandleToInfoMap,
5672               typename HandleType = typename std::decay_t<VkHandleToInfoMap>::key_type>
    std::vector<HandleType> findDeviceObjects(VkDevice device, const VkHandleToInfoMap& map) {
5674         std::vector<HandleType> objectsFromDevice;
5675         for (const auto& [objectHandle, objectInfo] : map) {
5676             if (objectInfo.device == device) {
5677                 objectsFromDevice.push_back(objectHandle);
5678             }
5679         }
5680         return objectsFromDevice;
5681     }
5682 
5683     template <typename VkHandleToInfoMap, typename InfoMemberType,
5684               typename HandleType = typename std::decay_t<VkHandleToInfoMap>::key_type,
5685               typename InfoType = typename std::decay_t<VkHandleToInfoMap>::value_type>
    std::vector<std::pair<HandleType, InfoMemberType>> findDeviceObjects(
5687         VkDevice device, const VkHandleToInfoMap& map, InfoMemberType InfoType::*member) {
5688         std::vector<std::pair<HandleType, InfoMemberType>> objectsFromDevice;
5689         for (const auto& [objectHandle, objectInfo] : map) {
5690             if (objectInfo.device == device) {
5691                 objectsFromDevice.emplace_back(objectHandle, objectInfo.*member);
5692             }
5693         }
5694         return objectsFromDevice;
5695     }
5696 
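    // Destroys every device created from this instance along with the objects still tracked for
    // it. Each device is idled first, then its objects are released roughly in reverse dependency
    // order (semaphores, samplers, buffers, image views, images, memory, command buffers/pools,
    // descriptor pools/set layouts, shader modules, pipelines, pipeline caches, framebuffers,
    // render passes) before the device itself is destroyed.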
    void teardownInstanceLocked(VkInstance instance) {
5698         std::vector<VkDevice> devicesToDestroy;
5699         std::vector<VulkanDispatch*> devicesToDestroyDispatches;
5700 
5701         for (auto it : mDeviceToPhysicalDevice) {
5702             auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
5703             if (!otherInstance) continue;
5704 
5705             if (instance == *otherInstance) {
5706                 devicesToDestroy.push_back(it.first);
5707                 devicesToDestroyDispatches.push_back(
5708                     dispatch_VkDevice(mDeviceInfo[it.first].boxed));
5709             }
5710         }
5711 
5712         for (uint32_t i = 0; i < devicesToDestroy.size(); ++i) {
5713             VkDevice deviceToDestroy = devicesToDestroy[i];
5714             VulkanDispatch* deviceToDestroyDispatch = devicesToDestroyDispatches[i];
5715 
5716             // https://bugs.chromium.org/p/chromium/issues/detail?id=1074600
5717             // it's important to idle the device before destroying it!
5718             deviceToDestroyDispatch->vkDeviceWaitIdle(deviceToDestroy);
5719 
5720             for (auto semaphore : findDeviceObjects(deviceToDestroy, mSemaphoreInfo)) {
5721                 destroySemaphoreLocked(deviceToDestroy, deviceToDestroyDispatch, semaphore,
5722                                        nullptr);
5723             }
5724 
5725             for (auto sampler : findDeviceObjects(deviceToDestroy, mSamplerInfo)) {
5726                 destroySamplerLocked(deviceToDestroy, deviceToDestroyDispatch, sampler, nullptr);
5727             }
5728 
5729             for (auto buffer : findDeviceObjects(deviceToDestroy, mBufferInfo)) {
5730                 deviceToDestroyDispatch->vkDestroyBuffer(deviceToDestroy, buffer, nullptr);
5731                 mBufferInfo.erase(buffer);
5732             }
5733 
5734             for (auto imageView : findDeviceObjects(deviceToDestroy, mImageViewInfo)) {
5735                 deviceToDestroyDispatch->vkDestroyImageView(deviceToDestroy, imageView, nullptr);
5736                 mImageViewInfo.erase(imageView);
5737             }
5738 
5739             for (auto image : findDeviceObjects(deviceToDestroy, mImageInfo)) {
5740                 destroyImageLocked(deviceToDestroy, deviceToDestroyDispatch, image, nullptr);
5741             }
5742 
5743             for (auto memory : findDeviceObjects(deviceToDestroy, mMemoryInfo)) {
5744                 freeMemoryLocked(deviceToDestroyDispatch, deviceToDestroy, memory, nullptr);
5745             }
5746 
5747             for (auto [commandBuffer, commandPool] :
5748                  findDeviceObjects(deviceToDestroy, mCmdBufferInfo, &CommandBufferInfo::cmdPool)) {
5749                 // The command buffer is freed with the vkDestroyCommandPool() below.
5750                 delete_VkCommandBuffer(unboxed_to_boxed_VkCommandBuffer(commandBuffer));
5751                 mCmdBufferInfo.erase(commandBuffer);
5752             }
5753 
5754             for (auto [commandPool, commandPoolBoxed] :
5755                  findDeviceObjects(deviceToDestroy, mCmdPoolInfo, &CommandPoolInfo::boxed)) {
5756                 deviceToDestroyDispatch->vkDestroyCommandPool(deviceToDestroy, commandPool,
5757                                                               nullptr);
5758                 delete_VkCommandPool(commandPoolBoxed);
5759                 mCmdPoolInfo.erase(commandPool);
5760             }
5761 
5762             for (auto [descriptorPool, descriptorPoolBoxed] : findDeviceObjects(
5763                      deviceToDestroy, mDescriptorPoolInfo, &DescriptorPoolInfo::boxed)) {
5764                 cleanupDescriptorPoolAllocedSetsLocked(descriptorPool, /*isDestroy=*/true);
5765                 deviceToDestroyDispatch->vkDestroyDescriptorPool(deviceToDestroy, descriptorPool,
5766                                                                  nullptr);
5767                 delete_VkDescriptorPool(descriptorPoolBoxed);
5768                 mDescriptorPoolInfo.erase(descriptorPool);
5769             }
5770 
5771             for (auto [descriptorSetLayout, descriptorSetLayoutBoxed] : findDeviceObjects(
5772                      deviceToDestroy, mDescriptorSetLayoutInfo, &DescriptorSetLayoutInfo::boxed)) {
5773                 deviceToDestroyDispatch->vkDestroyDescriptorSetLayout(deviceToDestroy,
5774                                                                       descriptorSetLayout, nullptr);
5775                 delete_VkDescriptorSetLayout(descriptorSetLayoutBoxed);
5776                 mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
5777             }
5778 
5779             for (auto shaderModule : findDeviceObjects(deviceToDestroy, mShaderModuleInfo)) {
5780                 destroyShaderModuleLocked(deviceToDestroy, deviceToDestroyDispatch, shaderModule,
5781                                           nullptr);
5782             }
5783 
5784             for (auto pipeline : findDeviceObjects(deviceToDestroy, mPipelineInfo)) {
5785                 destroyPipelineLocked(deviceToDestroy, deviceToDestroyDispatch, pipeline, nullptr);
5786             }
5787 
5788             for (auto pipelineCache : findDeviceObjects(deviceToDestroy, mPipelineCacheInfo)) {
5789                 destroyPipelineCacheLocked(deviceToDestroy, deviceToDestroyDispatch, pipelineCache,
5790                                            nullptr);
5791             }
5792 
5793             for (auto framebuffer : findDeviceObjects(deviceToDestroy, mFramebufferInfo)) {
5794                 destroyFramebufferLocked(deviceToDestroy, deviceToDestroyDispatch, framebuffer,
5795                                          nullptr);
5796             }
5797 
5798             for (auto renderPass : findDeviceObjects(deviceToDestroy, mRenderPassInfo)) {
5799                 destroyRenderPassLocked(deviceToDestroy, deviceToDestroyDispatch, renderPass,
5800                                         nullptr);
5801             }
5802         }
5803 
5804         for (VkDevice deviceToDestroy : devicesToDestroy) {
5805             destroyDeviceLocked(deviceToDestroy, nullptr);
5806             mDeviceInfo.erase(deviceToDestroy);
5807             mDeviceToPhysicalDevice.erase(deviceToDestroy);
5808         }
5809 
5810         // TODO: Clean up the physical device info in `mPhysdevInfo` but we need to be careful
5811         // as the Vulkan spec does not guarantee that the VkPhysicalDevice handles returned are
5812         // unique per VkInstance.
5813     }
5814 
5815     typedef std::function<void()> PreprocessFunc;
5816     struct CommandBufferInfo {
5817         std::vector<PreprocessFunc> preprocessFuncs = {};
5818         std::vector<VkCommandBuffer> subCmds = {};
5819         VkDevice device = VK_NULL_HANDLE;
5820         VkCommandPool cmdPool = VK_NULL_HANDLE;
5821         VkCommandBuffer boxed = VK_NULL_HANDLE;
5822         DebugUtilsHelper debugUtilsHelper = DebugUtilsHelper::withUtilsDisabled();
5823 
5824         // Most recently bound compute pipeline and descriptor sets. We save it here so that we can
5825         // restore it after doing emulated texture decompression.
5826         VkPipeline computePipeline = VK_NULL_HANDLE;
5827         uint32_t firstSet = 0;
5828         VkPipelineLayout descriptorLayout = VK_NULL_HANDLE;
5829         std::vector<VkDescriptorSet> descriptorSets;
5830         std::vector<uint32_t> dynamicOffsets;
5831 
        void reset() {
5833             preprocessFuncs.clear();
5834             subCmds.clear();
5835             computePipeline = VK_NULL_HANDLE;
5836             firstSet = 0;
5837             descriptorLayout = VK_NULL_HANDLE;
5838             descriptorSets.clear();
5839             dynamicOffsets.clear();
5840         }
5841     };
5842 
5843     struct CommandPoolInfo {
5844         VkDevice device = VK_NULL_HANDLE;
5845         VkCommandPool boxed = VK_NULL_HANDLE;
5846         std::unordered_set<VkCommandBuffer> cmdBuffers = {};
5847     };
5848 
    void removeCommandBufferInfo(const std::unordered_set<VkCommandBuffer>& cmdBuffers) {
5850         for (const auto& cmdBuffer : cmdBuffers) {
5851             mCmdBufferInfo.erase(cmdBuffer);
5852         }
5853     }
5854 
    bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
5856         return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
5857                (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
5858                (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
5859                (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
5860                (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
5861     }
5862 
    bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
5864         return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
5865                (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
5866                (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
5867                (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
5868     }
5869 
    bool isDescriptorTypeBufferView(VkDescriptorType descType) {
5871         return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
5872                (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
5873     }
5874 
5875     struct DescriptorUpdateTemplateInfo {
5876         VkDescriptorUpdateTemplateCreateInfo createInfo;
5877         std::vector<VkDescriptorUpdateTemplateEntry> linearizedTemplateEntries;
5878         // Preallocated pData
5879         std::vector<uint8_t> data;
5880         size_t imageInfoStart;
5881         size_t bufferInfoStart;
5882         size_t bufferViewStart;
5883         size_t inlineUniformBlockStart;
5884     };
5885 
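    // Builds a host-side copy of a descriptor update template whose entries all point into one
    // preallocated buffer laid out as [image infos][buffer infos][buffer views][inline uniform
    // block bytes]. For example, a template with two image-type descriptors followed by one
    // buffer-type descriptor gets a buffer of 2 * sizeof(VkDescriptorImageInfo) +
    // sizeof(VkDescriptorBufferInfo) bytes, with the buffer info entry's offset starting right
    // after the two image infos.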
    DescriptorUpdateTemplateInfo calcLinearizedDescriptorUpdateTemplateInfo(
5887         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo) {
5888         DescriptorUpdateTemplateInfo res;
5889         res.createInfo = *pCreateInfo;
5890 
5891         size_t numImageInfos = 0;
5892         size_t numBufferInfos = 0;
5893         size_t numBufferViews = 0;
5894         size_t numInlineUniformBlocks = 0;
5895 
5896         for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
5897             const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
5898             auto type = entry.descriptorType;
5899             auto count = entry.descriptorCount;
5900             if (isDescriptorTypeImageInfo(type)) {
5901                 numImageInfos += count;
5902             } else if (isDescriptorTypeBufferInfo(type)) {
5903                 numBufferInfos += count;
5904             } else if (isDescriptorTypeBufferView(type)) {
5905                 numBufferViews += count;
5906             } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
5907                 numInlineUniformBlocks += count;
5908             } else {
5909                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5910                     << "unknown descriptor type 0x" << std::hex << type;
5911             }
5912         }
5913 
5914         size_t imageInfoBytes = numImageInfos * sizeof(VkDescriptorImageInfo);
5915         size_t bufferInfoBytes = numBufferInfos * sizeof(VkDescriptorBufferInfo);
5916         size_t bufferViewBytes = numBufferViews * sizeof(VkBufferView);
5917         size_t inlineUniformBlockBytes = numInlineUniformBlocks;
5918 
5919         res.data.resize(imageInfoBytes + bufferInfoBytes + bufferViewBytes +
5920                         inlineUniformBlockBytes);
5921         res.imageInfoStart = 0;
5922         res.bufferInfoStart = imageInfoBytes;
5923         res.bufferViewStart = imageInfoBytes + bufferInfoBytes;
5924         res.inlineUniformBlockStart = imageInfoBytes + bufferInfoBytes + bufferViewBytes;
5925 
5926         size_t imageInfoCount = 0;
5927         size_t bufferInfoCount = 0;
5928         size_t bufferViewCount = 0;
5929         size_t inlineUniformBlockCount = 0;
5930 
5931         for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
5932             const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
5933             VkDescriptorUpdateTemplateEntry entryForHost = entry;
5934 
5935             auto type = entry.descriptorType;
5936 
5937             if (isDescriptorTypeImageInfo(type)) {
5938                 entryForHost.offset =
5939                     res.imageInfoStart + imageInfoCount * sizeof(VkDescriptorImageInfo);
5940                 entryForHost.stride = sizeof(VkDescriptorImageInfo);
5941                 ++imageInfoCount;
5942             } else if (isDescriptorTypeBufferInfo(type)) {
5943                 entryForHost.offset =
5944                     res.bufferInfoStart + bufferInfoCount * sizeof(VkDescriptorBufferInfo);
5945                 entryForHost.stride = sizeof(VkDescriptorBufferInfo);
5946                 ++bufferInfoCount;
5947             } else if (isDescriptorTypeBufferView(type)) {
5948                 entryForHost.offset = res.bufferViewStart + bufferViewCount * sizeof(VkBufferView);
5949                 entryForHost.stride = sizeof(VkBufferView);
5950                 ++bufferViewCount;
5951             } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
5952                 entryForHost.offset = res.inlineUniformBlockStart + inlineUniformBlockCount;
5953                 entryForHost.stride = 0;
5954                 inlineUniformBlockCount += entryForHost.descriptorCount;
5955             } else {
5956                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5957                     << "unknown descriptor type 0x" << std::hex << type;
5958             }
5959 
5960             res.linearizedTemplateEntries.push_back(entryForHost);
5961         }
5962 
5963         res.createInfo.pDescriptorUpdateEntries = res.linearizedTemplateEntries.data();
5964 
5965         return res;
5966     }
5967 
    void registerDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5969                                           const DescriptorUpdateTemplateInfo& info) {
5970         std::lock_guard<std::recursive_mutex> lock(mLock);
5971         mDescriptorUpdateTemplateInfo[descriptorUpdateTemplate] = info;
5972     }
5973 
    void unregisterDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
5975         std::lock_guard<std::recursive_mutex> lock(mLock);
5976         mDescriptorUpdateTemplateInfo.erase(descriptorUpdateTemplate);
5977     }
5978 
5979     // Returns the VkInstance associated with a VkDevice, or null if it's not found
    VkInstance* deviceToInstanceLocked(VkDevice device) {
5981         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
5982         if (!physicalDevice) return nullptr;
5983         return android::base::find(mPhysicalDeviceToInstance, *physicalDevice);
5984     }
5985 
5986     VulkanDispatch* m_vk;
5987     VkEmulation* m_emu;
5988     emugl::RenderDocWithMultipleVkInstances* mRenderDocWithMultipleVkInstances = nullptr;
5989     bool mSnapshotsEnabled = false;
5990     bool mVkCleanupEnabled = true;
5991     bool mLogging = false;
5992     bool mVerbosePrints = false;
5993     bool mUseOldMemoryCleanupPath = false;
5994     bool mGuestUsesAngle = false;
5995 
5996     std::recursive_mutex mLock;
5997 
5998     // We always map the whole size on host.
5999     // This makes it much easier to implement
6000     // the memory map API.
6001     struct MemoryInfo {
6002         // This indicates whether the VkDecoderGlobalState needs to clean up
6003         // and unmap the mapped memory; only the owner of the mapped memory
6004         // should call unmap.
6005         bool needUnmap = false;
6006         // When ptr is null, it means the VkDeviceMemory object
6007         // was not allocated with the HOST_VISIBLE property.
6008         void* ptr = nullptr;
6009         VkDeviceSize size;
6010         // GLDirectMem info
6011         bool directMapped = false;
6012         bool virtioGpuMapped = false;
6013         uint32_t caching = 0;
6014         uint64_t guestPhysAddr = 0;
6015         void* pageAlignedHva = nullptr;
6016         uint64_t sizeToPage = 0;
6017         uint64_t hostmemId = 0;
6018         VkDevice device = VK_NULL_HANDLE;
6019         MTLTextureRef mtlTexture = nullptr;
6020         uint32_t memoryIndex = 0;
6021         // Set if the memory is backed by shared memory.
6022         std::optional<SharedMemory> sharedMemory;
6023 
6024         // virtio-gpu blobs
6025         uint64_t blobId = 0;
6026     };
6027 
6028     struct InstanceInfo {
6029         std::vector<std::string> enabledExtensionNames;
6030         uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
6031         VkInstance boxed = nullptr;
6032         bool useAstcCpuDecompression = false;
6033         bool isAngle = false;
6034     };
6035 
6036     struct PhysicalDeviceInfo {
6037         VkPhysicalDeviceProperties props;
6038         VkPhysicalDeviceMemoryProperties memoryProperties;
6039         std::vector<VkQueueFamilyProperties> queueFamilyProperties;
6040         VkPhysicalDevice boxed = nullptr;
6041     };
6042 
6043     struct DeviceInfo {
6044         std::unordered_map<uint32_t, std::vector<VkQueue>> queues;
6045         std::vector<std::string> enabledExtensionNames;
6046         bool emulateTextureEtc2 = false;
6047         bool emulateTextureAstc = false;
6048         VkPhysicalDevice physicalDevice;
6049         VkDevice boxed = nullptr;
6050         DebugUtilsHelper debugUtilsHelper = DebugUtilsHelper::withUtilsDisabled();
6051         std::unique_ptr<ExternalFencePool<VulkanDispatch>> externalFencePool = nullptr;
6052 
6053         // True if this is a compressed image that needs to be decompressed on the GPU (with our
6054         // compute shader)
        bool needGpuDecompression(const CompressedImageInfo& cmpInfo) {
6056             return needEmulatedDecompression(cmpInfo) && !cmpInfo.successfullyDecompressedOnCpu();
6057         }
        bool needEmulatedDecompression(const CompressedImageInfo& imageInfo) {
6059             return ((imageInfo.isEtc2() && emulateTextureEtc2) ||
6060                     (imageInfo.isAstc() && emulateTextureAstc));
6061         }
        bool needEmulatedDecompression(VkFormat format) {
6063             return (CompressedImageInfo::isEtc2(format) && emulateTextureEtc2) ||
6064                    (CompressedImageInfo::isAstc(format) && emulateTextureAstc);
6065         }
6066     };
6067 
6068     struct QueueInfo {
6069         Lock* lock = nullptr;
6070         VkDevice device;
6071         uint32_t queueFamilyIndex;
6072         VkQueue boxed = nullptr;
6073         uint32_t sequenceNumber = 0;
6074     };
6075 
6076     struct BufferInfo {
6077         VkDevice device;
6078         VkDeviceMemory memory = 0;
6079         VkDeviceSize memoryOffset = 0;
6080         VkDeviceSize size;
6081     };
6082 
6083     struct ImageInfo {
6084         VkDevice device;
6085         VkImageCreateInfo imageCreateInfoShallow;
6086         std::shared_ptr<AndroidNativeBufferInfo> anbInfo;
6087         CompressedImageInfo cmpInfo;
6088     };
6089 
6090     struct ImageViewInfo {
6091         VkDevice device;
6092         bool needEmulatedAlpha = false;
6093     };
6094 
6095     struct SamplerInfo {
6096         VkDevice device;
6097         bool needEmulatedAlpha = false;
6098         VkSamplerCreateInfo createInfo = {};
6099         VkSampler emulatedborderSampler = VK_NULL_HANDLE;
6100         android::base::BumpPool pool = android::base::BumpPool(256);
6101         SamplerInfo() = default;
        SamplerInfo& operator=(const SamplerInfo& other) {
6103             deepcopy_VkSamplerCreateInfo(&pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
6104                                          &other.createInfo, &createInfo);
6105             device = other.device;
6106             needEmulatedAlpha = other.needEmulatedAlpha;
6107             emulatedborderSampler = other.emulatedborderSampler;
6108             return *this;
6109         }
        SamplerInfo(const SamplerInfo& other) { *this = other; }
6111         SamplerInfo(SamplerInfo&& other) = delete;
6112         SamplerInfo& operator=(SamplerInfo&& other) = delete;
6113     };
6114 
6115     struct FenceInfo {
6116         VkDevice device = VK_NULL_HANDLE;
6117         VkFence boxed = VK_NULL_HANDLE;
6118         VulkanDispatch* vk = nullptr;
6119 
6120         StaticLock lock;
6121         android::base::ConditionVariable cv;
6122 
6123         enum class State {
6124             kWaitable,
6125             kNotWaitable,
6126             kWaiting,
6127         };
6128         State state = State::kNotWaitable;
6129 
6130         bool external = false;
6131     };
6132 
6133     struct SemaphoreInfo {
6134         VkDevice device;
6135         int externalHandleId = 0;
6136         VK_EXT_MEMORY_HANDLE externalHandle = VK_EXT_MEMORY_HANDLE_INVALID;
6137     };
6138 
6139     struct DescriptorSetLayoutInfo {
6140         VkDevice device = 0;
6141         VkDescriptorSetLayout boxed = 0;
6142         VkDescriptorSetLayoutCreateInfo createInfo;
6143         std::vector<VkDescriptorSetLayoutBinding> bindings;
6144     };
6145 
6146     struct DescriptorPoolInfo {
6147         VkDevice device = 0;
6148         VkDescriptorPool boxed = 0;
6149         struct PoolState {
6150             VkDescriptorType type;
6151             uint32_t descriptorCount;
6152             uint32_t used;
6153         };
6154 
6155         VkDescriptorPoolCreateInfo createInfo;
6156         uint32_t maxSets;
6157         uint32_t usedSets;
6158         std::vector<PoolState> pools;
6159 
6160         std::unordered_map<VkDescriptorSet, VkDescriptorSet> allocedSetsToBoxed;
6161         std::vector<uint64_t> poolIds;
6162     };
6163 
6164     struct DescriptorSetInfo {
6165         VkDescriptorPool pool;
6166         std::vector<VkDescriptorSetLayoutBinding> bindings;
6167     };
6168 
6169     struct ShaderModuleInfo {
6170         VkDevice device;
6171     };
6172 
6173     struct PipelineCacheInfo {
6174         VkDevice device;
6175     };
6176 
6177     struct PipelineInfo {
6178         VkDevice device;
6179     };
6180 
6181     struct RenderPassInfo {
6182         VkDevice device;
6183     };
6184 
6185     struct FramebufferInfo {
6186         VkDevice device;
6187     };
6188 
    bool isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState& poolState,
6190                                    const VkDescriptorSetLayoutBinding& binding) {
6191         if (binding.descriptorCount && (poolState.type != binding.descriptorType)) {
6192             return false;
6193         }
6194 
6195         uint32_t availDescriptorCount = poolState.descriptorCount - poolState.used;
6196 
6197         if (availDescriptorCount < binding.descriptorCount) {
6198             return false;
6199         }
6200 
6201         return true;
6202     }
6203 
    bool isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState& poolState,
6205                                   const VkDescriptorSetLayoutBinding& binding) {
6206         if (poolState.type != binding.descriptorType) return false;
6207         if (poolState.used < binding.descriptorCount) return false;
6208         return true;
6209     }
6210 
    void allocBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
6212                               DescriptorPoolInfo::PoolState& poolState) {
6213         poolState.used += binding.descriptorCount;
6214     }
6215 
    void freeBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
6217                              DescriptorPoolInfo::PoolState& poolState) {
6218         poolState.used -= binding.descriptorCount;
6219     }
6220 
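    // Dry-runs a descriptor set allocation against a copy of the pool's per-type counters so the
    // real pool state is untouched; returns VK_ERROR_OUT_OF_POOL_MEMORY if either the set count
    // or any binding's descriptor count would not fit.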
    VkResult validateDescriptorSetAllocLocked(const VkDescriptorSetAllocateInfo* pAllocateInfo) {
6222         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
6223         if (!poolInfo) return VK_ERROR_INITIALIZATION_FAILED;
6224 
6225         // Check the number of sets available.
6226         auto setsAvailable = poolInfo->maxSets - poolInfo->usedSets;
6227 
6228         if (setsAvailable < pAllocateInfo->descriptorSetCount) {
6229             return VK_ERROR_OUT_OF_POOL_MEMORY;
6230         }
6231 
6232         // Perform simulated allocation and error out with
6233         // VK_ERROR_OUT_OF_POOL_MEMORY if it fails.
6234         std::vector<DescriptorPoolInfo::PoolState> poolCopy = poolInfo->pools;
6235 
6236         for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
6237             auto setLayoutInfo =
6238                 android::base::find(mDescriptorSetLayoutInfo, pAllocateInfo->pSetLayouts[i]);
6239             if (!setLayoutInfo) return VK_ERROR_INITIALIZATION_FAILED;
6240 
6241             for (const auto& binding : setLayoutInfo->bindings) {
6242                 bool success = false;
6243                 for (auto& pool : poolCopy) {
6244                     if (!isBindingFeasibleForAlloc(pool, binding)) continue;
6245 
6246                     success = true;
6247                     allocBindingFeasible(binding, pool);
6248                     break;
6249                 }
6250 
6251                 if (!success) {
6252                     return VK_ERROR_OUT_OF_POOL_MEMORY;
6253                 }
6254             }
6255         }
6256         return VK_SUCCESS;
6257     }
6258 
6259     void applyDescriptorSetAllocationLocked(
6260         DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
6261         ++poolInfo.usedSets;
6262         for (const auto& binding : bindings) {
6263             for (auto& pool : poolInfo.pools) {
6264                 if (!isBindingFeasibleForAlloc(pool, binding)) continue;
6265                 allocBindingFeasible(binding, pool);
6266                 break;
6267             }
6268         }
6269     }
6270 
6271     void removeDescriptorSetAllocationLocked(
6272         DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
6273         --poolInfo.usedSets;
6274         for (const auto& binding : bindings) {
6275             for (auto& pool : poolInfo.pools) {
6276                 if (!isBindingFeasibleForFree(pool, binding)) continue;
6277                 freeBindingFeasible(binding, pool);
6278                 break;
6279             }
6280         }
6281     }
6282 
6283     template <class T>
6284     class NonDispatchableHandleInfo {
6285        public:
6286         T underlying;
6287     };
6288 
6289     std::unordered_map<VkInstance, InstanceInfo> mInstanceInfo;
6290     std::unordered_map<VkPhysicalDevice, PhysicalDeviceInfo> mPhysdevInfo;
6291     std::unordered_map<VkDevice, DeviceInfo> mDeviceInfo;
6292     std::unordered_map<VkImage, ImageInfo> mImageInfo;
6293     std::unordered_map<VkImageView, ImageViewInfo> mImageViewInfo;
6294     std::unordered_map<VkSampler, SamplerInfo> mSamplerInfo;
6295     std::unordered_map<VkCommandBuffer, CommandBufferInfo> mCmdBufferInfo;
6296     std::unordered_map<VkCommandPool, CommandPoolInfo> mCmdPoolInfo;
6297     // TODO: release CommandBufferInfo when a command pool is reset/released
6298 
6299     // Back-references from a VkDevice to its physical device, and from a
6300     // physical device to its owning instance.
6301     std::unordered_map<VkDevice, VkPhysicalDevice> mDeviceToPhysicalDevice;
6302     std::unordered_map<VkPhysicalDevice, VkInstance> mPhysicalDeviceToInstance;
6303 
6304     std::unordered_map<VkQueue, QueueInfo> mQueueInfo;
6305     std::unordered_map<VkBuffer, BufferInfo> mBufferInfo;
6306 
6307     std::unordered_map<VkDeviceMemory, MemoryInfo> mMemoryInfo;
6308 
6309     std::unordered_map<VkShaderModule, ShaderModuleInfo> mShaderModuleInfo;
6310     std::unordered_map<VkPipelineCache, PipelineCacheInfo> mPipelineCacheInfo;
6311     std::unordered_map<VkPipeline, PipelineInfo> mPipelineInfo;
6312     std::unordered_map<VkRenderPass, RenderPassInfo> mRenderPassInfo;
6313     std::unordered_map<VkFramebuffer, FramebufferInfo> mFramebufferInfo;
6314 
6315     std::unordered_map<VkSemaphore, SemaphoreInfo> mSemaphoreInfo;
6316     std::unordered_map<VkFence, FenceInfo> mFenceInfo;
6317 
6318     std::unordered_map<VkDescriptorSetLayout, DescriptorSetLayoutInfo> mDescriptorSetLayoutInfo;
6319     std::unordered_map<VkDescriptorPool, DescriptorPoolInfo> mDescriptorPoolInfo;
6320     std::unordered_map<VkDescriptorSet, DescriptorSetInfo> mDescriptorSetInfo;
6321 
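    // Windows-only tracking of external semaphores by integer id.
    // genSemaphoreId() hands out ids starting from 1 and wraps the counter
    // back to 1 if it ever reaches -1.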
6322 #ifdef _WIN32
6323     int mSemaphoreId = 1;
6324     int genSemaphoreId() {
6325         if (mSemaphoreId == -1) {
6326             mSemaphoreId = 1;
6327         }
6328         int res = mSemaphoreId;
6329         ++mSemaphoreId;
6330         return res;
6331     }
6332     std::unordered_map<int, VkSemaphore> mExternalSemaphoresById;
6333 #endif
6334     std::unordered_map<VkDescriptorUpdateTemplate, DescriptorUpdateTemplateInfo>
6335         mDescriptorUpdateTemplateInfo;
6336 
6337     VkDecoderSnapshot mSnapshot;
6338 
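    // Handles recorded for snapshot load: setCreatedHandlesForSnapshotLoad()
    // fills the vector, and the index below tracks how many entries have been
    // consumed so far.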
6339     std::vector<uint64_t> mCreatedHandlesForSnapshotLoad;
6340     size_t mCreatedHandlesForSnapshotLoadIndex = 0;
6341 
6342     Lock mOccupiedGpasLock;
6343     // Back-reference to the VkDeviceMemory that is occupying a particular
6344     // guest physical address
6345     struct OccupiedGpaInfo {
6346         VulkanDispatch* vk;
6347         VkDevice device;
6348         VkDeviceMemory memory;
6349         uint64_t gpa;
6350         size_t sizeToPage;
6351     };
6352     std::unordered_map<uint64_t, OccupiedGpaInfo> mOccupiedGpas;
6353 
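    // Key describing a linearly-tiled image by extent, format, and usage.
    // toDefaultVk() expands the key into a complete VkImageCreateInfo with
    // VK_IMAGE_TILING_LINEAR and the remaining fields defaulted; Hash and
    // operator== below let it serve as the key type of mLinearImageProperties.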
6354     struct LinearImageCreateInfo {
6355         VkExtent3D extent;
6356         VkFormat format;
6357         VkImageUsageFlags usage;
6358 
6359         VkImageCreateInfo toDefaultVk() const {
6360             return VkImageCreateInfo{
6361                 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
6362                 .pNext = nullptr,
6363                 .flags = {},
6364                 .imageType = VK_IMAGE_TYPE_2D,
6365                 .format = format,
6366                 .extent = extent,
6367                 .mipLevels = 1,
6368                 .arrayLayers = 1,
6369                 .samples = VK_SAMPLE_COUNT_1_BIT,
6370                 .tiling = VK_IMAGE_TILING_LINEAR,
6371                 .usage = usage,
6372                 .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
6373                 .queueFamilyIndexCount = 0,
6374                 .pQueueFamilyIndices = nullptr,
6375                 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
6376             };
6377         }
6378 
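        // Hash functor combining the key's fields with the boost::hash_combine()
        // recipe: seed ^= hash(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2).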
6379         struct Hash {
6380             std::size_t operator()(const LinearImageCreateInfo& ci) const {
6381                 std::size_t s = 0;
6382                 // Magic number used in boost::hash_combine().
6383                 constexpr size_t kHashMagic = 0x9e3779b9;
6384                 s ^= std::hash<uint32_t>{}(ci.extent.width) + kHashMagic + (s << 6) + (s >> 2);
6385                 s ^= std::hash<uint32_t>{}(ci.extent.height) + kHashMagic + (s << 6) + (s >> 2);
6386                 s ^= std::hash<uint32_t>{}(ci.extent.depth) + kHashMagic + (s << 6) + (s >> 2);
6387                 s ^= std::hash<VkFormat>{}(ci.format) + kHashMagic + (s << 6) + (s >> 2);
6388                 s ^= std::hash<VkImageUsageFlags>{}(ci.usage) + kHashMagic + (s << 6) + (s >> 2);
6389                 return s;
6390             }
6391         };
6392     };
6393 
6394     friend bool operator==(const LinearImageCreateInfo& a, const LinearImageCreateInfo& b) {
6395         return a.extent.width == b.extent.width && a.extent.height == b.extent.height &&
6396                a.extent.depth == b.extent.depth && a.format == b.format && a.usage == b.usage;
6397     }
6398 
6399     struct LinearImageProperties {
6400         VkDeviceSize offset;
6401         VkDeviceSize rowPitchAlignment;
6402     };
6403 
6404     // TODO(liyl): Remove after removing the old vkGetLinearImageLayoutGOOGLE.
6405     std::unordered_map<VkFormat, LinearImageProperties> mPerFormatLinearImageProperties;
6406 
6407     std::unordered_map<LinearImageCreateInfo, LinearImageProperties, LinearImageCreateInfo::Hash>
6408         mLinearImageProperties;
6409 };
6410 
6411 VkDecoderGlobalState::VkDecoderGlobalState() : mImpl(new VkDecoderGlobalState::Impl()) {}
6412 
6413 VkDecoderGlobalState::~VkDecoderGlobalState() = default;
6414 
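// Process-wide singleton. get() creates the instance lazily and reset()
// destroys it; note that the lazy creation itself is not synchronized, so the
// first get() is expected to happen before any concurrent use.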
6415 static VkDecoderGlobalState* sGlobalDecoderState = nullptr;
6416 
6417 // static
6418 VkDecoderGlobalState* VkDecoderGlobalState::get() {
6419     if (sGlobalDecoderState) return sGlobalDecoderState;
6420     sGlobalDecoderState = new VkDecoderGlobalState;
6421     return sGlobalDecoderState;
6422 }
6423 
6424 // static
6425 void VkDecoderGlobalState::reset() {
6426     delete sGlobalDecoderState;
6427     sGlobalDecoderState = nullptr;
6428 }
6429 
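// The definitions below are thin wrappers that forward each public
// VkDecoderGlobalState entry point to the Impl above.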
6430 // Snapshots
6431 bool VkDecoderGlobalState::snapshotsEnabled() const { return mImpl->snapshotsEnabled(); }
6432 
6433 bool VkDecoderGlobalState::vkCleanupEnabled() const { return mImpl->vkCleanupEnabled(); }
6434 
6435 void VkDecoderGlobalState::save(android::base::Stream* stream) { mImpl->save(stream); }
6436 
6437 void VkDecoderGlobalState::load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
6438                                 HealthMonitor<>* healthMonitor) {
6439     mImpl->load(stream, gfxLogger, healthMonitor);
6440 }
6441 
6442 void VkDecoderGlobalState::lock() { mImpl->lock(); }
6443 
6444 void VkDecoderGlobalState::unlock() { mImpl->unlock(); }
6445 
6446 size_t VkDecoderGlobalState::setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
6447     return mImpl->setCreatedHandlesForSnapshotLoad(buffer);
6448 }
6449 
6450 void VkDecoderGlobalState::clearCreatedHandlesForSnapshotLoad() {
6451     mImpl->clearCreatedHandlesForSnapshotLoad();
6452 }
6453 
6454 VkResult VkDecoderGlobalState::on_vkEnumerateInstanceVersion(android::base::BumpPool* pool,
6455                                                              uint32_t* pApiVersion) {
6456     return mImpl->on_vkEnumerateInstanceVersion(pool, pApiVersion);
6457 }
6458 
6459 VkResult VkDecoderGlobalState::on_vkCreateInstance(android::base::BumpPool* pool,
6460                                                    const VkInstanceCreateInfo* pCreateInfo,
6461                                                    const VkAllocationCallbacks* pAllocator,
6462                                                    VkInstance* pInstance) {
6463     return mImpl->on_vkCreateInstance(pool, pCreateInfo, pAllocator, pInstance);
6464 }
6465 
6466 void VkDecoderGlobalState::on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance instance,
6467                                                 const VkAllocationCallbacks* pAllocator) {
6468     mImpl->on_vkDestroyInstance(pool, instance, pAllocator);
6469 }
6470 
6471 VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool,
6472                                                              VkInstance instance,
6473                                                              uint32_t* physicalDeviceCount,
6474                                                              VkPhysicalDevice* physicalDevices) {
6475     return mImpl->on_vkEnumeratePhysicalDevices(pool, instance, physicalDeviceCount,
6476                                                 physicalDevices);
6477 }
6478 
6479 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
6480                                                           VkPhysicalDevice physicalDevice,
6481                                                           VkPhysicalDeviceFeatures* pFeatures) {
6482     mImpl->on_vkGetPhysicalDeviceFeatures(pool, physicalDevice, pFeatures);
6483 }
6484 
6485 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
6486                                                            VkPhysicalDevice physicalDevice,
6487                                                            VkPhysicalDeviceFeatures2* pFeatures) {
6488     mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
6489 }
6490 
6491 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2KHR(
6492     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6493     VkPhysicalDeviceFeatures2KHR* pFeatures) {
6494     mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
6495 }
6496 
6497 VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties(
6498     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
6499     VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
6500     VkImageFormatProperties* pImageFormatProperties) {
6501     return mImpl->on_vkGetPhysicalDeviceImageFormatProperties(
6502         pool, physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
6503 }
6504 VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2(
6505     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6506     const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
6507     VkImageFormatProperties2* pImageFormatProperties) {
6508     return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
6509         pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
6510 }
6511 VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
6512     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6513     const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
6514     VkImageFormatProperties2* pImageFormatProperties) {
6515     return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
6516         pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
6517 }
6518 
6519 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties(
6520     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
6521     VkFormatProperties* pFormatProperties) {
6522     mImpl->on_vkGetPhysicalDeviceFormatProperties(pool, physicalDevice, format, pFormatProperties);
6523 }
6524 
6525 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2(
6526     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
6527     VkFormatProperties2* pFormatProperties) {
6528     mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
6529 }
6530 
6531 void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2KHR(
6532     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
6533     VkFormatProperties2* pFormatProperties) {
6534     mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
6535 }
6536 
6537 void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties(
6538     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6539     VkPhysicalDeviceProperties* pProperties) {
6540     mImpl->on_vkGetPhysicalDeviceProperties(pool, physicalDevice, pProperties);
6541 }
6542 
6543 void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2(
6544     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6545     VkPhysicalDeviceProperties2* pProperties) {
6546     mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
6547 }
6548 
6549 void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2KHR(
6550     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6551     VkPhysicalDeviceProperties2* pProperties) {
6552     mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
6553 }
6554 
6555 void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties(
6556     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6557     VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
6558     mImpl->on_vkGetPhysicalDeviceMemoryProperties(pool, physicalDevice, pMemoryProperties);
6559 }
6560 
6561 void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2(
6562     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6563     VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
6564     mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
6565 }
6566 
6567 void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2KHR(
6568     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
6569     VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
6570     mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
6571 }
6572 
6573 VkResult VkDecoderGlobalState::on_vkEnumerateDeviceExtensionProperties(
6574     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, const char* pLayerName,
6575     uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {
6576     return mImpl->on_vkEnumerateDeviceExtensionProperties(pool, physicalDevice, pLayerName,
6577                                                           pPropertyCount, pProperties);
6578 }
6579 
6580 VkResult VkDecoderGlobalState::on_vkCreateDevice(android::base::BumpPool* pool,
6581                                                  VkPhysicalDevice physicalDevice,
6582                                                  const VkDeviceCreateInfo* pCreateInfo,
6583                                                  const VkAllocationCallbacks* pAllocator,
6584                                                  VkDevice* pDevice) {
6585     return mImpl->on_vkCreateDevice(pool, physicalDevice, pCreateInfo, pAllocator, pDevice);
6586 }
6587 
6588 void VkDecoderGlobalState::on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice device,
6589                                                uint32_t queueFamilyIndex, uint32_t queueIndex,
6590                                                VkQueue* pQueue) {
6591     mImpl->on_vkGetDeviceQueue(pool, device, queueFamilyIndex, queueIndex, pQueue);
6592 }
6593 
6594 void VkDecoderGlobalState::on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice device,
6595                                                 const VkDeviceQueueInfo2* pQueueInfo,
6596                                                 VkQueue* pQueue) {
6597     mImpl->on_vkGetDeviceQueue2(pool, device, pQueueInfo, pQueue);
6598 }
6599 
6600 void VkDecoderGlobalState::on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice device,
6601                                               const VkAllocationCallbacks* pAllocator) {
6602     mImpl->on_vkDestroyDevice(pool, device, pAllocator);
6603 }
6604 
6605 VkResult VkDecoderGlobalState::on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice device,
6606                                                  const VkBufferCreateInfo* pCreateInfo,
6607                                                  const VkAllocationCallbacks* pAllocator,
6608                                                  VkBuffer* pBuffer) {
6609     return mImpl->on_vkCreateBuffer(pool, device, pCreateInfo, pAllocator, pBuffer);
6610 }
6611 
6612 void VkDecoderGlobalState::on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice device,
6613                                               VkBuffer buffer,
6614                                               const VkAllocationCallbacks* pAllocator) {
6615     mImpl->on_vkDestroyBuffer(pool, device, buffer, pAllocator);
6616 }
6617 
6618 VkResult VkDecoderGlobalState::on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice device,
6619                                                      VkBuffer buffer, VkDeviceMemory memory,
6620                                                      VkDeviceSize memoryOffset) {
6621     return mImpl->on_vkBindBufferMemory(pool, device, buffer, memory, memoryOffset);
6622 }
6623 
6624 VkResult VkDecoderGlobalState::on_vkBindBufferMemory2(android::base::BumpPool* pool,
6625                                                       VkDevice device, uint32_t bindInfoCount,
6626                                                       const VkBindBufferMemoryInfo* pBindInfos) {
6627     return mImpl->on_vkBindBufferMemory2(pool, device, bindInfoCount, pBindInfos);
6628 }
6629 
6630 VkResult VkDecoderGlobalState::on_vkBindBufferMemory2KHR(android::base::BumpPool* pool,
6631                                                          VkDevice device, uint32_t bindInfoCount,
6632                                                          const VkBindBufferMemoryInfo* pBindInfos) {
6633     return mImpl->on_vkBindBufferMemory2KHR(pool, device, bindInfoCount, pBindInfos);
6634 }
6635 
6636 VkResult VkDecoderGlobalState::on_vkCreateImage(android::base::BumpPool* pool, VkDevice device,
6637                                                 const VkImageCreateInfo* pCreateInfo,
6638                                                 const VkAllocationCallbacks* pAllocator,
6639                                                 VkImage* pImage) {
6640     return mImpl->on_vkCreateImage(pool, device, pCreateInfo, pAllocator, pImage);
6641 }
6642 
6643 void VkDecoderGlobalState::on_vkDestroyImage(android::base::BumpPool* pool, VkDevice device,
6644                                              VkImage image,
6645                                              const VkAllocationCallbacks* pAllocator) {
6646     mImpl->on_vkDestroyImage(pool, device, image, pAllocator);
6647 }
6648 
6649 VkResult VkDecoderGlobalState::on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice device,
6650                                                     VkImage image, VkDeviceMemory memory,
6651                                                     VkDeviceSize memoryOffset) {
6652     return mImpl->on_vkBindImageMemory(pool, device, image, memory, memoryOffset);
6653 }
6654 
6655 VkResult VkDecoderGlobalState::on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice device,
6656                                                      uint32_t bindInfoCount,
6657                                                      const VkBindImageMemoryInfo* pBindInfos) {
6658     return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
6659 }
6660 
6661 VkResult VkDecoderGlobalState::on_vkBindImageMemory2KHR(android::base::BumpPool* pool,
6662                                                         VkDevice device, uint32_t bindInfoCount,
6663                                                         const VkBindImageMemoryInfo* pBindInfos) {
6664     return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
6665 }
6666 
6667 VkResult VkDecoderGlobalState::on_vkCreateImageView(android::base::BumpPool* pool, VkDevice device,
6668                                                     const VkImageViewCreateInfo* pCreateInfo,
6669                                                     const VkAllocationCallbacks* pAllocator,
6670                                                     VkImageView* pView) {
6671     return mImpl->on_vkCreateImageView(pool, device, pCreateInfo, pAllocator, pView);
6672 }
6673 
6674 void VkDecoderGlobalState::on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice device,
6675                                                  VkImageView imageView,
6676                                                  const VkAllocationCallbacks* pAllocator) {
6677     mImpl->on_vkDestroyImageView(pool, device, imageView, pAllocator);
6678 }
6679 
6680 VkResult VkDecoderGlobalState::on_vkCreateSampler(android::base::BumpPool* pool, VkDevice device,
6681                                                   const VkSamplerCreateInfo* pCreateInfo,
6682                                                   const VkAllocationCallbacks* pAllocator,
6683                                                   VkSampler* pSampler) {
6684     return mImpl->on_vkCreateSampler(pool, device, pCreateInfo, pAllocator, pSampler);
6685 }
6686 
6687 void VkDecoderGlobalState::on_vkDestroySampler(android::base::BumpPool* pool, VkDevice device,
6688                                                VkSampler sampler,
6689                                                const VkAllocationCallbacks* pAllocator) {
6690     mImpl->on_vkDestroySampler(pool, device, sampler, pAllocator);
6691 }
6692 
6693 VkResult VkDecoderGlobalState::on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice device,
6694                                                     const VkSemaphoreCreateInfo* pCreateInfo,
6695                                                     const VkAllocationCallbacks* pAllocator,
6696                                                     VkSemaphore* pSemaphore) {
6697     return mImpl->on_vkCreateSemaphore(pool, device, pCreateInfo, pAllocator, pSemaphore);
6698 }
6699 
6700 VkResult VkDecoderGlobalState::on_vkImportSemaphoreFdKHR(
6701     android::base::BumpPool* pool, VkDevice device,
6702     const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
6703     return mImpl->on_vkImportSemaphoreFdKHR(pool, device, pImportSemaphoreFdInfo);
6704 }
6705 
6706 VkResult VkDecoderGlobalState::on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool,
6707                                                       VkDevice device,
6708                                                       const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
6709                                                       int* pFd) {
6710     return mImpl->on_vkGetSemaphoreFdKHR(pool, device, pGetFdInfo, pFd);
6711 }
6712 
6713 void VkDecoderGlobalState::on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice device,
6714                                                  VkSemaphore semaphore,
6715                                                  const VkAllocationCallbacks* pAllocator) {
6716     mImpl->on_vkDestroySemaphore(pool, device, semaphore, pAllocator);
6717 }
6718 
6719 VkResult VkDecoderGlobalState::on_vkCreateFence(android::base::BumpPool* pool, VkDevice device,
6720                                                 const VkFenceCreateInfo* pCreateInfo,
6721                                                 const VkAllocationCallbacks* pAllocator,
6722                                                 VkFence* pFence) {
6723     return mImpl->on_vkCreateFence(pool, device, pCreateInfo, pAllocator, pFence);
6724 }
6725 
6726 VkResult VkDecoderGlobalState::on_vkResetFences(android::base::BumpPool* pool, VkDevice device,
6727                                                 uint32_t fenceCount, const VkFence* pFences) {
6728     return mImpl->on_vkResetFences(pool, device, fenceCount, pFences);
6729 }
6730 
6731 void VkDecoderGlobalState::on_vkDestroyFence(android::base::BumpPool* pool, VkDevice device,
6732                                              VkFence fence,
6733                                              const VkAllocationCallbacks* pAllocator) {
6734     return mImpl->on_vkDestroyFence(pool, device, fence, pAllocator);
6735 }
6736 
6737 VkResult VkDecoderGlobalState::on_vkCreateDescriptorSetLayout(
6738     android::base::BumpPool* pool, VkDevice device,
6739     const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
6740     VkDescriptorSetLayout* pSetLayout) {
6741     return mImpl->on_vkCreateDescriptorSetLayout(pool, device, pCreateInfo, pAllocator, pSetLayout);
6742 }
6743 
6744 void VkDecoderGlobalState::on_vkDestroyDescriptorSetLayout(
6745     android::base::BumpPool* pool, VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
6746     const VkAllocationCallbacks* pAllocator) {
6747     mImpl->on_vkDestroyDescriptorSetLayout(pool, device, descriptorSetLayout, pAllocator);
6748 }
6749 
6750 VkResult VkDecoderGlobalState::on_vkCreateDescriptorPool(
6751     android::base::BumpPool* pool, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
6752     const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {
6753     return mImpl->on_vkCreateDescriptorPool(pool, device, pCreateInfo, pAllocator, pDescriptorPool);
6754 }
6755 
6756 void VkDecoderGlobalState::on_vkDestroyDescriptorPool(android::base::BumpPool* pool,
6757                                                       VkDevice device,
6758                                                       VkDescriptorPool descriptorPool,
6759                                                       const VkAllocationCallbacks* pAllocator) {
6760     mImpl->on_vkDestroyDescriptorPool(pool, device, descriptorPool, pAllocator);
6761 }
6762 
6763 VkResult VkDecoderGlobalState::on_vkResetDescriptorPool(android::base::BumpPool* pool,
6764                                                         VkDevice device,
6765                                                         VkDescriptorPool descriptorPool,
6766                                                         VkDescriptorPoolResetFlags flags) {
6767     return mImpl->on_vkResetDescriptorPool(pool, device, descriptorPool, flags);
6768 }
6769 
6770 VkResult VkDecoderGlobalState::on_vkAllocateDescriptorSets(
6771     android::base::BumpPool* pool, VkDevice device,
6772     const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {
6773     return mImpl->on_vkAllocateDescriptorSets(pool, device, pAllocateInfo, pDescriptorSets);
6774 }
6775 
6776 VkResult VkDecoderGlobalState::on_vkFreeDescriptorSets(android::base::BumpPool* pool,
6777                                                        VkDevice device,
6778                                                        VkDescriptorPool descriptorPool,
6779                                                        uint32_t descriptorSetCount,
6780                                                        const VkDescriptorSet* pDescriptorSets) {
6781     return mImpl->on_vkFreeDescriptorSets(pool, device, descriptorPool, descriptorSetCount,
6782                                           pDescriptorSets);
6783 }
6784 
6785 void VkDecoderGlobalState::on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
6786                                                      uint32_t descriptorWriteCount,
6787                                                      const VkWriteDescriptorSet* pDescriptorWrites,
6788                                                      uint32_t descriptorCopyCount,
6789                                                      const VkCopyDescriptorSet* pDescriptorCopies) {
6790     mImpl->on_vkUpdateDescriptorSets(pool, device, descriptorWriteCount, pDescriptorWrites,
6791                                      descriptorCopyCount, pDescriptorCopies);
6792 }
6793 
6794 VkResult VkDecoderGlobalState::on_vkCreateShaderModule(android::base::BumpPool* pool,
6795                                                        VkDevice boxed_device,
6796                                                        const VkShaderModuleCreateInfo* pCreateInfo,
6797                                                        const VkAllocationCallbacks* pAllocator,
6798                                                        VkShaderModule* pShaderModule) {
6799     return mImpl->on_vkCreateShaderModule(pool, boxed_device, pCreateInfo, pAllocator,
6800                                           pShaderModule);
6801 }
6802 
6803 void VkDecoderGlobalState::on_vkDestroyShaderModule(android::base::BumpPool* pool,
6804                                                     VkDevice boxed_device,
6805                                                     VkShaderModule shaderModule,
6806                                                     const VkAllocationCallbacks* pAllocator) {
6807     mImpl->on_vkDestroyShaderModule(pool, boxed_device, shaderModule, pAllocator);
6808 }
6809 
6810 VkResult VkDecoderGlobalState::on_vkCreatePipelineCache(
6811     android::base::BumpPool* pool, VkDevice boxed_device,
6812     const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
6813     VkPipelineCache* pPipelineCache) {
6814     return mImpl->on_vkCreatePipelineCache(pool, boxed_device, pCreateInfo, pAllocator,
6815                                            pPipelineCache);
6816 }
6817 
6818 void VkDecoderGlobalState::on_vkDestroyPipelineCache(android::base::BumpPool* pool,
6819                                                      VkDevice boxed_device,
6820                                                      VkPipelineCache pipelineCache,
6821                                                      const VkAllocationCallbacks* pAllocator) {
6822     mImpl->on_vkDestroyPipelineCache(pool, boxed_device, pipelineCache, pAllocator);
6823 }
6824 
6825 VkResult VkDecoderGlobalState::on_vkCreateGraphicsPipelines(
6826     android::base::BumpPool* pool, VkDevice boxed_device, VkPipelineCache pipelineCache,
6827     uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos,
6828     const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
6829     return mImpl->on_vkCreateGraphicsPipelines(pool, boxed_device, pipelineCache, createInfoCount,
6830                                                pCreateInfos, pAllocator, pPipelines);
6831 }
6832 
6833 void VkDecoderGlobalState::on_vkDestroyPipeline(android::base::BumpPool* pool,
6834                                                 VkDevice boxed_device, VkPipeline pipeline,
6835                                                 const VkAllocationCallbacks* pAllocator) {
6836     mImpl->on_vkDestroyPipeline(pool, boxed_device, pipeline, pAllocator);
6837 }
6838 
6839 void VkDecoderGlobalState::on_vkCmdCopyBufferToImage(
6840     android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
6841     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
6842     const VkBufferImageCopy* pRegions, const VkDecoderContext& context) {
6843     mImpl->on_vkCmdCopyBufferToImage(pool, commandBuffer, srcBuffer, dstImage, dstImageLayout,
6844                                      regionCount, pRegions, context);
6845 }
6846 
6847 void VkDecoderGlobalState::on_vkCmdCopyImage(android::base::BumpPool* pool,
6848                                              VkCommandBuffer commandBuffer, VkImage srcImage,
6849                                              VkImageLayout srcImageLayout, VkImage dstImage,
6850                                              VkImageLayout dstImageLayout, uint32_t regionCount,
6851                                              const VkImageCopy* pRegions) {
6852     mImpl->on_vkCmdCopyImage(pool, commandBuffer, srcImage, srcImageLayout, dstImage,
6853                              dstImageLayout, regionCount, pRegions);
6854 }
6855 void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
6856                                                      VkCommandBuffer commandBuffer,
6857                                                      VkImage srcImage, VkImageLayout srcImageLayout,
6858                                                      VkBuffer dstBuffer, uint32_t regionCount,
6859                                                      const VkBufferImageCopy* pRegions) {
6860     mImpl->on_vkCmdCopyImageToBuffer(pool, commandBuffer, srcImage, srcImageLayout, dstBuffer,
6861                                      regionCount, pRegions);
6862 }
6863 
6864 void VkDecoderGlobalState::on_vkGetImageMemoryRequirements(
6865     android::base::BumpPool* pool, VkDevice device, VkImage image,
6866     VkMemoryRequirements* pMemoryRequirements) {
6867     mImpl->on_vkGetImageMemoryRequirements(pool, device, image, pMemoryRequirements);
6868 }
6869 
6870 void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2(
6871     android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
6872     VkMemoryRequirements2* pMemoryRequirements) {
6873     mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
6874 }
6875 
6876 void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2KHR(
6877     android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
6878     VkMemoryRequirements2* pMemoryRequirements) {
6879     mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
6880 }
6881 
6882 void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements(
6883     android::base::BumpPool* pool, VkDevice device, VkBuffer buffer,
6884     VkMemoryRequirements* pMemoryRequirements) {
6885     mImpl->on_vkGetBufferMemoryRequirements(pool, device, buffer, pMemoryRequirements);
6886 }
6887 
6888 void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2(
6889     android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
6890     VkMemoryRequirements2* pMemoryRequirements) {
6891     mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
6892 }
6893 
6894 void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2KHR(
6895     android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
6896     VkMemoryRequirements2* pMemoryRequirements) {
6897     mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
6898 }
6899 
6900 void VkDecoderGlobalState::on_vkCmdPipelineBarrier(
6901     android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
6902     VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
6903     uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
6904     uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
6905     uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
6906     mImpl->on_vkCmdPipelineBarrier(pool, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
6907                                    memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
6908                                    pBufferMemoryBarriers, imageMemoryBarrierCount,
6909                                    pImageMemoryBarriers);
6910 }
6911 
6912 VkResult VkDecoderGlobalState::on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice device,
6913                                                    const VkMemoryAllocateInfo* pAllocateInfo,
6914                                                    const VkAllocationCallbacks* pAllocator,
6915                                                    VkDeviceMemory* pMemory) {
6916     return mImpl->on_vkAllocateMemory(pool, device, pAllocateInfo, pAllocator, pMemory);
6917 }
6918 
6919 void VkDecoderGlobalState::on_vkFreeMemory(android::base::BumpPool* pool, VkDevice device,
6920                                            VkDeviceMemory memory,
6921                                            const VkAllocationCallbacks* pAllocator) {
6922     mImpl->on_vkFreeMemory(pool, device, memory, pAllocator);
6923 }
6924 
6925 VkResult VkDecoderGlobalState::on_vkMapMemory(android::base::BumpPool* pool, VkDevice device,
6926                                               VkDeviceMemory memory, VkDeviceSize offset,
6927                                               VkDeviceSize size, VkMemoryMapFlags flags,
6928                                               void** ppData) {
6929     return mImpl->on_vkMapMemory(pool, device, memory, offset, size, flags, ppData);
6930 }
6931 
6932 void VkDecoderGlobalState::on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice device,
6933                                             VkDeviceMemory memory) {
6934     mImpl->on_vkUnmapMemory(pool, device, memory);
6935 }
6936 
6937 uint8_t* VkDecoderGlobalState::getMappedHostPointer(VkDeviceMemory memory) {
6938     return mImpl->getMappedHostPointer(memory);
6939 }
6940 
6941 VkDeviceSize VkDecoderGlobalState::getDeviceMemorySize(VkDeviceMemory memory) {
6942     return mImpl->getDeviceMemorySize(memory);
6943 }
6944 
6945 bool VkDecoderGlobalState::usingDirectMapping() const { return mImpl->usingDirectMapping(); }
6946 
6947 VkDecoderGlobalState::HostFeatureSupport VkDecoderGlobalState::getHostFeatureSupport() const {
6948     return mImpl->getHostFeatureSupport();
6949 }
6950 
6951 // VK_ANDROID_native_buffer
6952 VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool,
6953                                                                     VkDevice device,
6954                                                                     VkFormat format,
6955                                                                     VkImageUsageFlags imageUsage,
6956                                                                     int* grallocUsage) {
6957     return mImpl->on_vkGetSwapchainGrallocUsageANDROID(pool, device, format, imageUsage,
6958                                                        grallocUsage);
6959 }
6960 
6961 VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsage2ANDROID(
6962     android::base::BumpPool* pool, VkDevice device, VkFormat format, VkImageUsageFlags imageUsage,
6963     VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
6964     uint64_t* grallocProducerUsage) {
6965     return mImpl->on_vkGetSwapchainGrallocUsage2ANDROID(pool, device, format, imageUsage,
6966                                                         swapchainImageUsage, grallocConsumerUsage,
6967                                                         grallocProducerUsage);
6968 }
6969 
6970 VkResult VkDecoderGlobalState::on_vkAcquireImageANDROID(android::base::BumpPool* pool,
6971                                                         VkDevice device, VkImage image,
6972                                                         int nativeFenceFd, VkSemaphore semaphore,
6973                                                         VkFence fence) {
6974     return mImpl->on_vkAcquireImageANDROID(pool, device, image, nativeFenceFd, semaphore, fence);
6975 }
6976 
6977 VkResult VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROID(
6978     android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
6979     const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) {
6980     return mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount,
6981                                                       pWaitSemaphores, image, pNativeFenceFd);
6982 }
6983 
6984 // VK_GOOGLE_gfxstream
6985 VkResult VkDecoderGlobalState::on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
6986                                                                     VkDevice device,
6987                                                                     VkDeviceMemory memory,
6988                                                                     uint64_t* pAddress) {
6989     return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(pool, device, memory, pAddress);
6990 }
6991 
6992 VkResult VkDecoderGlobalState::on_vkGetMemoryHostAddressInfoGOOGLE(
6993     android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory, uint64_t* pAddress,
6994     uint64_t* pSize, uint64_t* pHostmemId) {
6995     return mImpl->on_vkGetMemoryHostAddressInfoGOOGLE(pool, device, memory, pAddress, pSize,
6996                                                       pHostmemId);
6997 }
6998 
on_vkGetBlobGOOGLE(android::base::BumpPool * pool,VkDevice device,VkDeviceMemory memory)6999 VkResult VkDecoderGlobalState::on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice device,
7000                                                   VkDeviceMemory memory) {
7001     return mImpl->on_vkGetBlobGOOGLE(pool, device, memory);
7002 }
7003 
on_vkFreeMemorySyncGOOGLE(android::base::BumpPool * pool,VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)7004 VkResult VkDecoderGlobalState::on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool,
7005                                                          VkDevice device, VkDeviceMemory memory,
7006                                                          const VkAllocationCallbacks* pAllocator) {
7007     return mImpl->on_vkFreeMemorySyncGOOGLE(pool, device, memory, pAllocator);
7008 }
7009 
on_vkAllocateCommandBuffers(android::base::BumpPool * pool,VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)7010 VkResult VkDecoderGlobalState::on_vkAllocateCommandBuffers(
7011     android::base::BumpPool* pool, VkDevice device,
7012     const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {
7013     return mImpl->on_vkAllocateCommandBuffers(pool, device, pAllocateInfo, pCommandBuffers);
7014 }
7015 
on_vkCreateCommandPool(android::base::BumpPool * pool,VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool)7016 VkResult VkDecoderGlobalState::on_vkCreateCommandPool(android::base::BumpPool* pool,
7017                                                       VkDevice device,
7018                                                       const VkCommandPoolCreateInfo* pCreateInfo,
7019                                                       const VkAllocationCallbacks* pAllocator,
7020                                                       VkCommandPool* pCommandPool) {
7021     return mImpl->on_vkCreateCommandPool(pool, device, pCreateInfo, pAllocator, pCommandPool);
7022 }
7023 
on_vkDestroyCommandPool(android::base::BumpPool * pool,VkDevice device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)7024 void VkDecoderGlobalState::on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice device,
7025                                                    VkCommandPool commandPool,
7026                                                    const VkAllocationCallbacks* pAllocator) {
7027     mImpl->on_vkDestroyCommandPool(pool, device, commandPool, pAllocator);
7028 }
7029 
on_vkResetCommandPool(android::base::BumpPool * pool,VkDevice device,VkCommandPool commandPool,VkCommandPoolResetFlags flags)7030 VkResult VkDecoderGlobalState::on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice device,
7031                                                      VkCommandPool commandPool,
7032                                                      VkCommandPoolResetFlags flags) {
7033     return mImpl->on_vkResetCommandPool(pool, device, commandPool, flags);
7034 }
7035 
on_vkCmdExecuteCommands(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)7036 void VkDecoderGlobalState::on_vkCmdExecuteCommands(android::base::BumpPool* pool,
7037                                                    VkCommandBuffer commandBuffer,
7038                                                    uint32_t commandBufferCount,
7039                                                    const VkCommandBuffer* pCommandBuffers) {
7040     return mImpl->on_vkCmdExecuteCommands(pool, commandBuffer, commandBufferCount, pCommandBuffers);
7041 }
7042 
on_vkQueueSubmit(android::base::BumpPool * pool,VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)7043 VkResult VkDecoderGlobalState::on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue queue,
7044                                                 uint32_t submitCount, const VkSubmitInfo* pSubmits,
7045                                                 VkFence fence) {
7046     return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
7047 }
7048 
on_vkQueueWaitIdle(android::base::BumpPool * pool,VkQueue queue)7049 VkResult VkDecoderGlobalState::on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue queue) {
7050     return mImpl->on_vkQueueWaitIdle(pool, queue);
7051 }
7052 
on_vkResetCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)7053 VkResult VkDecoderGlobalState::on_vkResetCommandBuffer(android::base::BumpPool* pool,
7054                                                        VkCommandBuffer commandBuffer,
7055                                                        VkCommandBufferResetFlags flags) {
7056     return mImpl->on_vkResetCommandBuffer(pool, commandBuffer, flags);
7057 }
7058 
on_vkFreeCommandBuffers(android::base::BumpPool * pool,VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)7059 void VkDecoderGlobalState::on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice device,
7060                                                    VkCommandPool commandPool,
7061                                                    uint32_t commandBufferCount,
7062                                                    const VkCommandBuffer* pCommandBuffers) {
7063     return mImpl->on_vkFreeCommandBuffers(pool, device, commandPool, commandBufferCount,
7064                                           pCommandBuffers);
7065 }
7066 
on_vkGetPhysicalDeviceExternalSemaphoreProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)7067 void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphoreProperties(
7068     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7069     const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
7070     VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
7071     return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
7072         pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
7073 }
7074 
on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)7075 void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
7076     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
7077     const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
7078     VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
7079     return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
7080         pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
7081 }
7082 
// Descriptor update templates
VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplate(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplate(pool, boxed_device, pCreateInfo, pAllocator,
                                                      pDescriptorUpdateTemplate);
}

VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplateKHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(pool, boxed_device, pCreateInfo,
                                                         pAllocator, pDescriptorUpdateTemplate);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplate(
    android::base::BumpPool* pool, VkDevice boxed_device,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorUpdateTemplate(pool, boxed_device, descriptorUpdateTemplate,
                                                pAllocator);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplateKHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorUpdateTemplateKHR(pool, boxed_device, descriptorUpdateTemplate,
                                                   pAllocator);
}

void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
    android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
    uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
    const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
    const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
    const VkBufferView* pBufferViews) {
    mImpl->on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
        pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
        bufferInfoCount, bufferViewCount, pImageInfoEntryIndices, pBufferInfoEntryIndices,
        pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews);
}

void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
    android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
    uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
    const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
    const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
    const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
    const uint8_t* pInlineUniformBlockData) {
    mImpl->on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
        pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
        bufferInfoCount, bufferViewCount, inlineUniformBlockCount, pImageInfoEntryIndices,
        pBufferInfoEntryIndices, pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews,
        pInlineUniformBlockData);
}

VkResult VkDecoderGlobalState::on_vkBeginCommandBuffer(android::base::BumpPool* pool,
                                                       VkCommandBuffer commandBuffer,
                                                       const VkCommandBufferBeginInfo* pBeginInfo,
                                                       const VkDecoderContext& context) {
    return mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
}

void VkDecoderGlobalState::on_vkBeginCommandBufferAsyncGOOGLE(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    const VkCommandBufferBeginInfo* pBeginInfo, const VkDecoderContext& context) {
    mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
}

VkResult VkDecoderGlobalState::on_vkEndCommandBuffer(android::base::BumpPool* pool,
                                                     VkCommandBuffer commandBuffer,
                                                     const VkDecoderContext& context) {
    return mImpl->on_vkEndCommandBuffer(pool, commandBuffer, context);
}

void VkDecoderGlobalState::on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                                            VkCommandBuffer commandBuffer,
                                                            const VkDecoderContext& context) {
    mImpl->on_vkEndCommandBufferAsyncGOOGLE(pool, commandBuffer, context);
}

void VkDecoderGlobalState::on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
                                                              VkCommandBuffer commandBuffer,
                                                              VkCommandBufferResetFlags flags) {
    mImpl->on_vkResetCommandBufferAsyncGOOGLE(pool, commandBuffer, flags);
}

void VkDecoderGlobalState::on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
                                                            VkCommandBuffer commandBuffer,
                                                            uint32_t needHostSync,
                                                            uint32_t sequenceNumber) {
    mImpl->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
}

VkResult VkDecoderGlobalState::on_vkCreateImageWithRequirementsGOOGLE(
    android::base::BumpPool* pool, VkDevice device, const VkImageCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkImage* pImage,
    VkMemoryRequirements* pMemoryRequirements) {
    return mImpl->on_vkCreateImageWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
                                                         pImage, pMemoryRequirements);
}

VkResult VkDecoderGlobalState::on_vkCreateBufferWithRequirementsGOOGLE(
    android::base::BumpPool* pool, VkDevice device, const VkBufferCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer,
    VkMemoryRequirements* pMemoryRequirements) {
    return mImpl->on_vkCreateBufferWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
                                                          pBuffer, pMemoryRequirements);
}

void VkDecoderGlobalState::on_vkCmdBindPipeline(android::base::BumpPool* pool,
                                                VkCommandBuffer commandBuffer,
                                                VkPipelineBindPoint pipelineBindPoint,
                                                VkPipeline pipeline) {
    mImpl->on_vkCmdBindPipeline(pool, commandBuffer, pipelineBindPoint, pipeline);
}

void VkDecoderGlobalState::on_vkCmdBindDescriptorSets(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
    VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet,
    uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets,
    uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
    mImpl->on_vkCmdBindDescriptorSets(pool, commandBuffer, pipelineBindPoint, layout, firstSet,
                                      descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
                                      pDynamicOffsets);
}

VkResult VkDecoderGlobalState::on_vkCreateRenderPass(android::base::BumpPool* pool,
                                                     VkDevice boxed_device,
                                                     const VkRenderPassCreateInfo* pCreateInfo,
                                                     const VkAllocationCallbacks* pAllocator,
                                                     VkRenderPass* pRenderPass) {
    return mImpl->on_vkCreateRenderPass(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}

VkResult VkDecoderGlobalState::on_vkCreateRenderPass2(android::base::BumpPool* pool,
                                                      VkDevice boxed_device,
                                                      const VkRenderPassCreateInfo2* pCreateInfo,
                                                      const VkAllocationCallbacks* pAllocator,
                                                      VkRenderPass* pRenderPass) {
    return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}

VkResult VkDecoderGlobalState::on_vkCreateRenderPass2KHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkRenderPass* pRenderPass) {
    return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}

void VkDecoderGlobalState::on_vkDestroyRenderPass(android::base::BumpPool* pool,
                                                  VkDevice boxed_device, VkRenderPass renderPass,
                                                  const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyRenderPass(pool, boxed_device, renderPass, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateFramebuffer(android::base::BumpPool* pool,
                                                      VkDevice boxed_device,
                                                      const VkFramebufferCreateInfo* pCreateInfo,
                                                      const VkAllocationCallbacks* pAllocator,
                                                      VkFramebuffer* pFramebuffer) {
    return mImpl->on_vkCreateFramebuffer(pool, boxed_device, pCreateInfo, pAllocator, pFramebuffer);
}

void VkDecoderGlobalState::on_vkDestroyFramebuffer(android::base::BumpPool* pool,
                                                   VkDevice boxed_device, VkFramebuffer framebuffer,
                                                   const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyFramebuffer(pool, boxed_device, framebuffer, pAllocator);
}

void VkDecoderGlobalState::on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
                                                    uint32_t needHostSync,
                                                    uint32_t sequenceNumber) {
    mImpl->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
}

void VkDecoderGlobalState::on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
                                                        VkCommandBuffer commandBuffer,
                                                        VkQueryPool queryPool, uint32_t firstQuery,
                                                        uint32_t queryCount, VkBuffer dstBuffer,
                                                        VkDeviceSize dstOffset, VkDeviceSize stride,
                                                        VkQueryResultFlags flags) {
    mImpl->on_vkCmdCopyQueryPoolResults(pool, commandBuffer, queryPool, firstQuery, queryCount,
                                        dstBuffer, dstOffset, stride, flags);
}

void VkDecoderGlobalState::on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
                                                       uint32_t submitCount,
                                                       const VkSubmitInfo* pSubmits,
                                                       VkFence fence) {
    mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}

void VkDecoderGlobalState::on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool* pool,
                                                         VkQueue queue) {
    mImpl->on_vkQueueWaitIdle(pool, queue);
}

void VkDecoderGlobalState::on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool* pool,
                                                           VkQueue queue, uint32_t bindInfoCount,
                                                           const VkBindSparseInfo* pBindInfo,
                                                           VkFence fence) {
    mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}

void VkDecoderGlobalState::on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool,
                                                           VkDevice device, VkFormat format,
                                                           VkDeviceSize* pOffset,
                                                           VkDeviceSize* pRowPitchAlignment) {
    mImpl->on_vkGetLinearImageLayoutGOOGLE(pool, device, format, pOffset, pRowPitchAlignment);
}

void VkDecoderGlobalState::on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool,
                                                            VkDevice device,
                                                            const VkImageCreateInfo* pCreateInfo,
                                                            VkDeviceSize* pOffset,
                                                            VkDeviceSize* pRowPitchAlignment) {
    mImpl->on_vkGetLinearImageLayout2GOOGLE(pool, device, pCreateInfo, pOffset, pRowPitchAlignment);
}

void VkDecoderGlobalState::on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool,
                                                         VkQueue queue,
                                                         VkCommandBuffer commandBuffer,
                                                         VkDeviceSize dataSize, const void* pData,
                                                         const VkDecoderContext& context) {
    mImpl->on_vkQueueFlushCommandsGOOGLE(pool, queue, commandBuffer, dataSize, pData, context);
}

void VkDecoderGlobalState::on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, VkCommandBuffer commandBuffer,
    VkDeviceMemory deviceMemory, VkDeviceSize dataOffset, VkDeviceSize dataSize,
    const VkDecoderContext& context) {
    mImpl->on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(pool, queue, commandBuffer, deviceMemory,
                                                      dataOffset, dataSize, context);
}

void VkDecoderGlobalState::on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, uint32_t descriptorPoolCount,
    const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
    const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
    const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
    const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
    const VkWriteDescriptorSet* pPendingDescriptorWrites) {
    mImpl->on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
        pool, queue, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
        pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
        pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
        pendingDescriptorWriteCount, pPendingDescriptorWrites);
}

void VkDecoderGlobalState::on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool,
                                                               VkDevice device,
                                                               VkDescriptorPool descriptorPool,
                                                               uint32_t* pPoolIdCount,
                                                               uint64_t* pPoolIds) {
    mImpl->on_vkCollectDescriptorPoolIdsGOOGLE(pool, device, descriptorPool, pPoolIdCount,
                                               pPoolIds);
}

VkResult VkDecoderGlobalState::on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue queue,
                                                    uint32_t bindInfoCount,
                                                    const VkBindSparseInfo* pBindInfo,
                                                    VkFence fence) {
    return mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}

void VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores, VkImage image) {
    int fenceFd;
    mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount, pWaitSemaphores,
                                               image, &fenceFd);
}

VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversion(
    android::base::BumpPool* pool, VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
                                                    pYcbcrConversion);
}

VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversionKHR(
    android::base::BumpPool* pool, VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
                                                    pYcbcrConversion);
}

void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversion(
    android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
}

void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversionKHR(
    android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
}

void VkDecoderGlobalState::on_DeviceLost() { mImpl->on_DeviceLost(); }

void VkDecoderGlobalState::DeviceLostHandler() { mImpl->DeviceLostHandler(); }

void VkDecoderGlobalState::on_CheckOutOfMemory(VkResult result, uint32_t opCode,
                                               const VkDecoderContext& context,
                                               std::optional<uint64_t> allocationSize) {
    mImpl->on_CheckOutOfMemory(result, opCode, context, allocationSize);
}

VkResult VkDecoderGlobalState::waitForFence(VkFence boxed_fence, uint64_t timeout) {
    return mImpl->waitForFence(boxed_fence, timeout);
}

VkResult VkDecoderGlobalState::getFenceStatus(VkFence boxed_fence) {
    return mImpl->getFenceStatus(boxed_fence);
}

AsyncResult VkDecoderGlobalState::registerQsriCallback(VkImage image,
                                                       VkQsriTimeline::Callback callback) {
    return mImpl->registerQsriCallback(image, std::move(callback));
}

void VkDecoderGlobalState::deviceMemoryTransform_tohost(VkDeviceMemory* memory,
                                                        uint32_t memoryCount, VkDeviceSize* offset,
                                                        uint32_t offsetCount, VkDeviceSize* size,
                                                        uint32_t sizeCount, uint32_t* typeIndex,
                                                        uint32_t typeIndexCount, uint32_t* typeBits,
                                                        uint32_t typeBitsCount) {
    // Not used currently
    (void)memory;
    (void)memoryCount;
    (void)offset;
    (void)offsetCount;
    (void)size;
    (void)sizeCount;
    (void)typeIndex;
    (void)typeIndexCount;
    (void)typeBits;
    (void)typeBitsCount;
}

void VkDecoderGlobalState::deviceMemoryTransform_fromhost(
    VkDeviceMemory* memory, uint32_t memoryCount, VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    // Not used currently
    (void)memory;
    (void)memoryCount;
    (void)offset;
    (void)offsetCount;
    (void)size;
    (void)sizeCount;
    (void)typeIndex;
    (void)typeIndexCount;
    (void)typeBits;
    (void)typeBitsCount;
}

VkDecoderSnapshot* VkDecoderGlobalState::snapshot() { return mImpl->snapshot(); }

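// For every type listed in LIST_TRANSFORMED_TYPES, generate thin wrappers that
// forward the to-host / from-host transform entry points to mImpl.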
#define DEFINE_TRANSFORMED_TYPE_IMPL(type)                                                        \
    void VkDecoderGlobalState::transformImpl_##type##_tohost(const type* val, uint32_t count) {   \
        mImpl->transformImpl_##type##_tohost(val, count);                                         \
    }                                                                                             \
    void VkDecoderGlobalState::transformImpl_##type##_fromhost(const type* val, uint32_t count) { \
        mImpl->transformImpl_##type##_fromhost(val, count);                                       \
    }

LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)

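// Boxed-handle member functions: creation, deletion, unboxing, reverse lookup,
// and (for dispatchable handles) dispatch-table retrieval all forward to mImpl.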
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF(type)                                         \
    type VkDecoderGlobalState::new_boxed_##type(type underlying, VulkanDispatch* dispatch,     \
                                                bool ownDispatch) {                            \
        return mImpl->new_boxed_##type(underlying, dispatch, ownDispatch);                     \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::unboxed_to_boxed_##type(type unboxed) {                         \
        return mImpl->unboxed_to_boxed_##type(unboxed);                                        \
    }                                                                                          \
    VulkanDispatch* VkDecoderGlobalState::dispatch_##type(type boxed) {                        \
        return mImpl->dispatch_##type(boxed);                                                  \
    }

#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF(type)                                     \
    type VkDecoderGlobalState::new_boxed_non_dispatchable_##type(type underlying) {            \
        return mImpl->new_boxed_non_dispatchable_##type(underlying);                           \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::unboxed_to_boxed_non_dispatchable_##type(type unboxed) {        \
        return mImpl->unboxed_to_boxed_non_dispatchable_##type(unboxed);                       \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF)

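// Namespace-level helpers for boxed dispatchable handles. These consult
// sBoxedHandleManager directly; delete_##type also releases the handle's
// ordering-maintenance info and returns its read stream to sReadStreamRegistry.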
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                     \
    type unbox_##type(type boxed) {                                                               \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return VK_NULL_HANDLE;                                                          \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    VulkanDispatch* dispatch_##type(type boxed) {                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed);                     \
            return nullptr;                                                                       \
        }                                                                                         \
        return elt->dispatch;                                                                     \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return;                                                                         \
        releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
        if (elt->readStream) {                                                                    \
            sReadStreamRegistry.push(elt->readStream);                                            \
            elt->readStream = nullptr;                                                            \
        }                                                                                         \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    type unboxed_to_boxed_##type(type unboxed) {                                                  \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }

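// Namespace-level helpers for boxed non-dispatchable handles. unbox_##type
// aborts if the handle is unknown, and delayed_delete_##type defers removal
// via sBoxedHandleManager.removeDelayed with the owning device and a callback.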
#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                 \
    type new_boxed_non_dispatchable_##type(type underlying) {                                     \
        return VkDecoderGlobalState::get()->new_boxed_non_dispatchable_##type(underlying);        \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
        sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
    }                                                                                             \
    type unbox_##type(type boxed) {                                                               \
        if (!boxed) return boxed;                                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                       \
                << "Unbox " << boxed << " failed, not found.";                                    \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)

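// BoxedHandleUnwrapAndDeletePreserveBoxedMapping rewrites handle arrays in
// place with their unboxed equivalents while stashing the original boxed
// values in a side buffer allocated from the supplied BumpPool.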
void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::setup(android::base::BumpPool* pool,
                                                           uint64_t** bufPtr) {
    mPool = pool;
    mPreserveBufPtr = bufPtr;
}

void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::allocPreserve(size_t count) {
    *mPreserveBufPtr = (uint64_t*)mPool->alloc(count * sizeof(uint64_t));
}

#define BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)        \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }

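// The non-dispatchable variant below additionally destroys the boxed wrapper
// (delete_##type_name) once the unboxed handle has been captured.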
#define BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)    \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                auto boxed = handles[i];                                                  \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                auto boxed = handles[i];                                                  \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                auto boxed = (type_name)(uintptr_t)handle_u64s[i];                        \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(
    BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(
    BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)

}  // namespace vk
}  // namespace gfxstream