// Copyright (C) 2018 The Android Open Source Project
// Copyright (C) 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "ResourceTracker.h"
#include "goldfish_vk_private_defs.h"

#include "../OpenglSystemCommon/EmulatorFeatureInfo.h"

#ifdef VK_USE_PLATFORM_ANDROID_KHR

#include "../egl/goldfish_sync.h"

typedef uint32_t zx_handle_t;
#define ZX_HANDLE_INVALID         ((zx_handle_t)0)
void zx_handle_close(zx_handle_t) { }
void zx_event_create(int, zx_handle_t*) { }

#include "AndroidHardwareBuffer.h"

#endif // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_FUCHSIA

#include <cutils/native_handle.h>
#include <fuchsia/hardware/goldfish/control/c/fidl.h>
#include <fuchsia/sysmem/cpp/fidl.h>
#include <lib/fdio/directory.h>
#include <lib/fdio/fd.h>
#include <lib/fdio/fdio.h>
#include <lib/fdio/io.h>
#include <lib/zx/channel.h>
#include <zircon/process.h>
#include <zircon/syscalls.h>
#include <zircon/syscalls/object.h>

struct AHardwareBuffer;

void AHardwareBuffer_release(AHardwareBuffer*) { }

native_handle_t *AHardwareBuffer_getNativeHandle(AHardwareBuffer*) { return NULL; }

uint64_t getAndroidHardwareBufferUsageFromVkUsage(
    const VkImageCreateFlags vk_create,
    const VkImageUsageFlags vk_usage) {
  return AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
}

VkResult importAndroidHardwareBuffer(
    const VkImportAndroidHardwareBufferInfoANDROID* info,
    struct AHardwareBuffer **importOut) {
  return VK_SUCCESS;
}

VkResult createAndroidHardwareBuffer(
    bool hasDedicatedImage,
    bool hasDedicatedBuffer,
    const VkExtent3D& imageExtent,
    uint32_t imageLayers,
    VkFormat imageFormat,
    VkImageUsageFlags imageUsage,
    VkImageCreateFlags imageCreateFlags,
    VkDeviceSize bufferSize,
    VkDeviceSize allocationInfoAllocSize,
    struct AHardwareBuffer **out) {
  return VK_SUCCESS;
}

namespace goldfish_vk {
struct HostVisibleMemoryVirtualizationInfo;
}

VkResult getAndroidHardwareBufferPropertiesANDROID(
    const goldfish_vk::HostVisibleMemoryVirtualizationInfo*,
    VkDevice,
    const AHardwareBuffer*,
    VkAndroidHardwareBufferPropertiesANDROID*) { return VK_SUCCESS; }

VkResult getMemoryAndroidHardwareBufferANDROID(struct AHardwareBuffer **) { return VK_SUCCESS; }

#endif // VK_USE_PLATFORM_FUCHSIA

#include "HostVisibleMemoryVirtualization.h"
#include "Resources.h"
#include "VkEncoder.h"

#include "android/base/AlignedBuf.h"
#include "android/base/synchronization/AndroidLock.h"

#include "gralloc_cb.h"
#include "goldfish_address_space.h"
#include "goldfish_vk_private_defs.h"
#include "vk_format_info.h"
#include "vk_util.h"

#include <string>
#include <unordered_map>
#include <set>

#include <vndk/hardware_buffer.h>
#include <log/log.h>
#include <stdlib.h>
#include <sync/sync.h>

#ifdef VK_USE_PLATFORM_ANDROID_KHR

#include <sys/mman.h>
#include <sys/syscall.h>

#ifdef HOST_BUILD
#include "android/utils/tempfile.h"
#endif

#ifndef HAVE_MEMFD_CREATE
static inline int
memfd_create(const char *name, unsigned int flags) {
#ifdef HOST_BUILD
    TempFile* tmpFile = tempfile_create();
    return open(tempfile_path(tmpFile), O_RDWR);
    // TODO: Windows is not supposed to support VkSemaphoreGetFdInfoKHR
#else
    return syscall(SYS_memfd_create, name, flags);
#endif
}
#endif // !HAVE_MEMFD_CREATE
#endif // VK_USE_PLATFORM_ANDROID_KHR

#define RESOURCE_TRACKER_DEBUG 0

#if RESOURCE_TRACKER_DEBUG
#undef D
#define D(fmt,...) ALOGD("%s: " fmt, __func__, ##__VA_ARGS__);
#else
#ifndef D
#define D(fmt,...)
#endif
#endif

using android::aligned_buf_alloc;
using android::aligned_buf_free;
using android::base::guest::AutoLock;
using android::base::guest::Lock;

namespace goldfish_vk {

#define MAKE_HANDLE_MAPPING_FOREACH(type_name, map_impl, map_to_u64_impl, map_from_u64_impl) \
    void mapHandles_##type_name(type_name* handles, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_impl; \
        } \
    } \
    void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_to_u64_impl; \
        } \
    } \
    void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_from_u64_impl; \
        } \
    } \

#define DEFINE_RESOURCE_TRACKING_CLASS(class_name, impl) \
class class_name : public VulkanHandleMapping { \
public: \
    virtual ~class_name() { } \
    GOLDFISH_VK_LIST_HANDLE_TYPES(impl) \
}; \

#define CREATE_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        handles[i] = new_from_host_##type_name(handles[i]); ResourceTracker::get()->register_##type_name(handles[i]);, \
        handle_u64s[i] = (uint64_t)new_from_host_##type_name(handles[i]), \
        handles[i] = (type_name)new_from_host_u64_##type_name(handle_u64s[i]); ResourceTracker::get()->register_##type_name(handles[i]);)

#define UNWRAP_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        handles[i] = get_host_##type_name(handles[i]), \
        handle_u64s[i] = (uint64_t)get_host_u64_##type_name(handles[i]), \
        handles[i] = (type_name)get_host_##type_name((type_name)handle_u64s[i]))

#define DESTROY_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        ResourceTracker::get()->unregister_##type_name(handles[i]); delete_goldfish_##type_name(handles[i]), \
        (void)handle_u64s[i]; delete_goldfish_##type_name(handles[i]), \
        (void)handles[i]; delete_goldfish_##type_name((type_name)handle_u64s[i]))

DEFINE_RESOURCE_TRACKING_CLASS(CreateMapping, CREATE_MAPPING_IMPL_FOR_TYPE)
DEFINE_RESOURCE_TRACKING_CLASS(UnwrapMapping, UNWRAP_MAPPING_IMPL_FOR_TYPE)
DEFINE_RESOURCE_TRACKING_CLASS(DestroyMapping, DESTROY_MAPPING_IMPL_FOR_TYPE)
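
// Illustrative sketch (not part of the build): for a handle type such as
// VkDevice, CREATE_MAPPING_IMPL_FOR_TYPE expands through
// MAKE_HANDLE_MAPPING_FOREACH into members of CreateMapping roughly like:
//
//     void mapHandles_VkDevice(VkDevice* handles, size_t count) override {
//         for (size_t i = 0; i < count; ++i) {
//             handles[i] = new_from_host_VkDevice(handles[i]);
//             ResourceTracker::get()->register_VkDevice(handles[i]);
//         }
//     }
//
// together with the corresponding mapHandles_VkDevice_u64 and
// mapHandles_u64_VkDevice overloads. UnwrapMapping and DestroyMapping follow
// the same pattern using get_host_* and delete_goldfish_* respectively.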

class ResourceTracker::Impl {
public:
    Impl() = default;
    CreateMapping createMapping;
    UnwrapMapping unwrapMapping;
    DestroyMapping destroyMapping;
    DefaultHandleMapping defaultMapping;

#define HANDLE_DEFINE_TRIVIAL_INFO_STRUCT(type) \
    struct type##_Info { \
        uint32_t unused; \
    }; \

    GOLDFISH_VK_LIST_TRIVIAL_HANDLE_TYPES(HANDLE_DEFINE_TRIVIAL_INFO_STRUCT)

    struct VkInstance_Info {
        uint32_t highestApiVersion;
        std::set<std::string> enabledExtensions;
        // Fodder for vkEnumeratePhysicalDevices.
        std::vector<VkPhysicalDevice> physicalDevices;
    };

    using HostMemBlocks = std::vector<HostMemAlloc>;
    using HostMemBlockIndex = size_t;

#define INVALID_HOST_MEM_BLOCK (-1)

    struct VkDevice_Info {
        VkPhysicalDevice physdev;
        VkPhysicalDeviceProperties props;
        VkPhysicalDeviceMemoryProperties memProps;
        std::vector<HostMemBlocks> hostMemBlocks { VK_MAX_MEMORY_TYPES };
        uint32_t apiVersion;
        std::set<std::string> enabledExtensions;
        VkFence fence = VK_NULL_HANDLE;
    };

    struct VkDeviceMemory_Info {
        VkDeviceSize allocationSize = 0;
        VkDeviceSize mappedSize = 0;
        uint8_t* mappedPtr = nullptr;
        uint32_t memoryTypeIndex = 0;
        bool virtualHostVisibleBacking = false;
        bool directMapped = false;
        GoldfishAddressSpaceBlock*
            goldfishAddressSpaceBlock = nullptr;
        SubAlloc subAlloc;
        AHardwareBuffer* ahw = nullptr;
        zx_handle_t vmoHandle = ZX_HANDLE_INVALID;
    };

    // custom guest-side structs for images/buffers because of AHardwareBuffer :((
    struct VkImage_Info {
        VkDevice device;
        VkImageCreateInfo createInfo;
        bool external = false;
        VkExternalMemoryImageCreateInfo externalCreateInfo;
        VkDeviceMemory currentBacking = VK_NULL_HANDLE;
        VkDeviceSize currentBackingOffset = 0;
        VkDeviceSize currentBackingSize = 0;
    };

    struct VkBuffer_Info {
        VkDevice device;
        VkBufferCreateInfo createInfo;
        bool external = false;
        VkExternalMemoryBufferCreateInfo externalCreateInfo;
        VkDeviceMemory currentBacking = VK_NULL_HANDLE;
        VkDeviceSize currentBackingOffset = 0;
        VkDeviceSize currentBackingSize = 0;
    };

    struct VkSemaphore_Info {
        VkDevice device;
        zx_handle_t eventHandle = ZX_HANDLE_INVALID;
        int syncFd = -1;
    };

    struct VkDescriptorUpdateTemplate_Info {
        std::vector<VkDescriptorUpdateTemplateEntry> templateEntries;

        // Flattened versions
        std::vector<uint32_t> imageInfoEntryIndices;
        std::vector<uint32_t> bufferInfoEntryIndices;
        std::vector<uint32_t> bufferViewEntryIndices;
        std::vector<VkDescriptorImageInfo> imageInfos;
        std::vector<VkDescriptorBufferInfo> bufferInfos;
        std::vector<VkBufferView> bufferViews;
    };

#define HANDLE_REGISTER_IMPL_IMPL(type) \
    std::unordered_map<type, type##_Info> info_##type; \
    void register_##type(type obj) { \
        AutoLock lock(mLock); \
        info_##type[obj] = type##_Info(); \
    } \

#define HANDLE_UNREGISTER_IMPL_IMPL(type) \
    void unregister_##type(type obj) { \
        AutoLock lock(mLock); \
        info_##type.erase(obj); \
    } \

    GOLDFISH_VK_LIST_HANDLE_TYPES(HANDLE_REGISTER_IMPL_IMPL)
    GOLDFISH_VK_LIST_TRIVIAL_HANDLE_TYPES(HANDLE_UNREGISTER_IMPL_IMPL)

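    // Illustrative note: for each handle type in GOLDFISH_VK_LIST_HANDLE_TYPES,
    // HANDLE_REGISTER_IMPL_IMPL above generates an info map plus a register
    // method; e.g. for VkDeviceMemory (assuming it is in that list):
    //
    //     std::unordered_map<VkDeviceMemory, VkDeviceMemory_Info> info_VkDeviceMemory;
    //     void register_VkDeviceMemory(VkDeviceMemory obj) {
    //         AutoLock lock(mLock);
    //         info_VkDeviceMemory[obj] = VkDeviceMemory_Info();
    //     }
    //
    // Trivial handle types also get generated unregister_* methods; the
    // non-trivial types define unregister_* by hand below so they can release
    // their backing resources first.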
    void unregister_VkInstance(VkInstance instance) {
        AutoLock lock(mLock);

        auto it = info_VkInstance.find(instance);
        if (it == info_VkInstance.end()) return;
        auto info = it->second;
        info_VkInstance.erase(instance);
        lock.unlock();
    }

    void unregister_VkDevice(VkDevice device) {
        AutoLock lock(mLock);

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return;
        auto info = it->second;
        info_VkDevice.erase(device);
        lock.unlock();
    }

    void unregister_VkDeviceMemory(VkDeviceMemory mem) {
        AutoLock lock(mLock);

        auto it = info_VkDeviceMemory.find(mem);
        if (it == info_VkDeviceMemory.end()) return;

        auto& memInfo = it->second;

        if (memInfo.ahw) {
            AHardwareBuffer_release(memInfo.ahw);
        }

        if (memInfo.vmoHandle != ZX_HANDLE_INVALID) {
            zx_handle_close(memInfo.vmoHandle);
        }

        if (memInfo.mappedPtr &&
            !memInfo.virtualHostVisibleBacking &&
            !memInfo.directMapped) {
            aligned_buf_free(memInfo.mappedPtr);
        }

        if (memInfo.directMapped) {
            subFreeHostMemory(&memInfo.subAlloc);
        }

        delete memInfo.goldfishAddressSpaceBlock;

        info_VkDeviceMemory.erase(mem);
    }

    void unregister_VkImage(VkImage img) {
        AutoLock lock(mLock);

        auto it = info_VkImage.find(img);
        if (it == info_VkImage.end()) return;

        auto& imageInfo = it->second;

        info_VkImage.erase(img);
    }

    void unregister_VkBuffer(VkBuffer buf) {
        AutoLock lock(mLock);

        auto it = info_VkBuffer.find(buf);
        if (it == info_VkBuffer.end()) return;

        info_VkBuffer.erase(buf);
    }

    void unregister_VkSemaphore(VkSemaphore sem) {
        AutoLock lock(mLock);

        auto it = info_VkSemaphore.find(sem);
        if (it == info_VkSemaphore.end()) return;

        auto& semInfo = it->second;

        if (semInfo.eventHandle != ZX_HANDLE_INVALID) {
            zx_handle_close(semInfo.eventHandle);
        }

        info_VkSemaphore.erase(sem);
    }

    void unregister_VkDescriptorUpdateTemplate(VkDescriptorUpdateTemplate templ) {
        info_VkDescriptorUpdateTemplate.erase(templ);
    }

    // TODO: Upgrade to 1.1
    static constexpr uint32_t kMaxApiVersion = VK_MAKE_VERSION(1, 1, 0);
    static constexpr uint32_t kMinApiVersion = VK_MAKE_VERSION(1, 0, 0);

    void setInstanceInfo(VkInstance instance,
                         uint32_t enabledExtensionCount,
                         const char* const* ppEnabledExtensionNames,
                         uint32_t apiVersion) {
        AutoLock lock(mLock);
        auto& info = info_VkInstance[instance];
        info.highestApiVersion = apiVersion;

        if (!ppEnabledExtensionNames) return;

        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
            info.enabledExtensions.insert(ppEnabledExtensionNames[i]);
        }
    }

    void setDeviceInfo(VkDevice device,
                       VkPhysicalDevice physdev,
                       VkPhysicalDeviceProperties props,
                       VkPhysicalDeviceMemoryProperties memProps,
                       uint32_t enabledExtensionCount,
                       const char* const* ppEnabledExtensionNames) {
        AutoLock lock(mLock);
        auto& info = info_VkDevice[device];
        info.physdev = physdev;
        info.props = props;
        info.memProps = memProps;
        initHostVisibleMemoryVirtualizationInfo(
            physdev, &memProps,
            mFeatureInfo->hasDirectMem,
            &mHostVisibleMemoryVirtInfo);
        info.apiVersion = props.apiVersion;

        if (!ppEnabledExtensionNames) return;

        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
            info.enabledExtensions.insert(ppEnabledExtensionNames[i]);
        }
    }

    void setDeviceMemoryInfo(VkDevice device,
                             VkDeviceMemory memory,
                             VkDeviceSize allocationSize,
                             VkDeviceSize mappedSize,
                             uint8_t* ptr,
                             uint32_t memoryTypeIndex,
                             AHardwareBuffer* ahw = nullptr,
                             zx_handle_t vmoHandle = ZX_HANDLE_INVALID) {
        AutoLock lock(mLock);
        auto& deviceInfo = info_VkDevice[device];
        auto& info = info_VkDeviceMemory[memory];

        info.allocationSize = allocationSize;
        info.mappedSize = mappedSize;
        info.mappedPtr = ptr;
        info.memoryTypeIndex = memoryTypeIndex;
        info.ahw = ahw;
        info.vmoHandle = vmoHandle;
    }

    void setImageInfo(VkImage image,
                      VkDevice device,
                      const VkImageCreateInfo *pCreateInfo) {
        AutoLock lock(mLock);
        auto& info = info_VkImage[image];

        info.device = device;
        info.createInfo = *pCreateInfo;
    }

    bool isMemoryTypeHostVisible(VkDevice device, uint32_t typeIndex) const {
        AutoLock lock(mLock);
        const auto it = info_VkDevice.find(device);

        if (it == info_VkDevice.end()) return false;

        const auto& info = it->second;
        return info.memProps.memoryTypes[typeIndex].propertyFlags &
               VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }

    uint8_t* getMappedPointer(VkDeviceMemory memory) {
        AutoLock lock(mLock);
        const auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) return nullptr;

        const auto& info = it->second;
        return info.mappedPtr;
    }

    VkDeviceSize getMappedSize(VkDeviceMemory memory) {
        AutoLock lock(mLock);
        const auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) return 0;

        const auto& info = it->second;
        return info.mappedSize;
    }

    VkDeviceSize getNonCoherentExtendedSize(VkDevice device, VkDeviceSize basicSize) const {
        AutoLock lock(mLock);
        const auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return basicSize;
        const auto& info = it->second;

        VkDeviceSize nonCoherentAtomSize =
            info.props.limits.nonCoherentAtomSize;
        VkDeviceSize atoms =
            (basicSize + nonCoherentAtomSize - 1) / nonCoherentAtomSize;
        return atoms * nonCoherentAtomSize;
    }
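
    // Worked example (informational): with nonCoherentAtomSize = 64 and
    // basicSize = 100, atoms = (100 + 63) / 64 = 2, so the extended size is
    // 2 * 64 = 128, i.e. basicSize rounded up to the next multiple of the
    // device's non-coherent atom size.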

    bool isValidMemoryRange(const VkMappedMemoryRange& range) const {
        AutoLock lock(mLock);
        const auto it = info_VkDeviceMemory.find(range.memory);
        if (it == info_VkDeviceMemory.end()) return false;
        const auto& info = it->second;

        if (!info.mappedPtr) return false;

        VkDeviceSize offset = range.offset;
        VkDeviceSize size = range.size;

        if (size == VK_WHOLE_SIZE) {
            return offset <= info.mappedSize;
        }

        return offset + size <= info.mappedSize;
    }
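
    // Informational example: for a memory object with mappedSize = 4096, a
    // range with offset = 1024 and size = VK_WHOLE_SIZE is accepted (the
    // offset lies within the mapping), while offset = 1024 with an explicit
    // size = 4096 is rejected because 1024 + 4096 > 4096.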

    void setupFeatures(const EmulatorFeatureInfo* features) {
        if (!features || mFeatureInfo) return;
        mFeatureInfo.reset(new EmulatorFeatureInfo);
        *mFeatureInfo = *features;

        if (mFeatureInfo->hasDirectMem) {
            mGoldfishAddressSpaceBlockProvider.reset(
                new GoldfishAddressSpaceBlockProvider);
        }

#ifdef VK_USE_PLATFORM_FUCHSIA
        if (mFeatureInfo->hasVulkan) {
            int fd = open("/dev/class/goldfish-control/000", O_RDWR);
            if (fd < 0) {
                ALOGE("failed to open control device");
                abort();
            }
            zx_status_t status = fdio_get_service_handle(fd, &mControlDevice);
            if (status != ZX_OK) {
                ALOGE("failed to get control service handle, status %d", status);
                abort();
            }
            status = fuchsia_hardware_goldfish_control_DeviceConnectSysmem(
                mControlDevice,
                mSysmemAllocator.NewRequest().TakeChannel().release());
            if (status != ZX_OK) {
                ALOGE("failed to get sysmem connection, status %d", status);
                abort();
            }
        }
#endif
    }

    bool hostSupportsVulkan() const {
        if (!mFeatureInfo) return false;

        return mFeatureInfo->hasVulkan;
    }

    bool usingDirectMapping() const {
        return mHostVisibleMemoryVirtInfo.virtualizationSupported;
    }

    bool supportsDeferredCommands() const {
        if (!mFeatureInfo) return false;
        return mFeatureInfo->hasDeferredVulkanCommands;
    }

    int getHostInstanceExtensionIndex(const std::string& extName) const {
        int i = 0;
        for (const auto& prop : mHostInstanceExtensions) {
            if (extName == std::string(prop.extensionName)) {
                return i;
            }
            ++i;
        }
        return -1;
    }

    int getHostDeviceExtensionIndex(const std::string& extName) const {
        int i = 0;
        for (const auto& prop : mHostDeviceExtensions) {
            if (extName == std::string(prop.extensionName)) {
                return i;
            }
            ++i;
        }
        return -1;
    }

    void deviceMemoryTransform_tohost(
        VkDeviceMemory* memory, uint32_t memoryCount,
        VkDeviceSize* offset, uint32_t offsetCount,
        VkDeviceSize* size, uint32_t sizeCount,
        uint32_t* typeIndex, uint32_t typeIndexCount,
        uint32_t* typeBits, uint32_t typeBitsCount) {

        (void)memoryCount;
        (void)offsetCount;
        (void)sizeCount;

        const auto& hostVirt =
            mHostVisibleMemoryVirtInfo;

        if (!hostVirt.virtualizationSupported) return;

        if (memory) {
            AutoLock lock (mLock);

            for (uint32_t i = 0; i < memoryCount; ++i) {
                VkDeviceMemory mem = memory[i];

                auto it = info_VkDeviceMemory.find(mem);
                if (it == info_VkDeviceMemory.end()) return;

                const auto& info = it->second;

                if (!info.directMapped) continue;

                memory[i] = info.subAlloc.baseMemory;

                if (offset) {
                    offset[i] = info.subAlloc.baseOffset + offset[i];
                }

                if (size) {
                    if (size[i] == VK_WHOLE_SIZE) {
                        size[i] = info.subAlloc.subMappedSize;
                    }
                }

                // TODO
                (void)memory;
                (void)offset;
                (void)size;
            }
        }

        for (uint32_t i = 0; i < typeIndexCount; ++i) {
            typeIndex[i] =
                hostVirt.memoryTypeIndexMappingToHost[typeIndex[i]];
        }

        for (uint32_t i = 0; i < typeBitsCount; ++i) {
            uint32_t bits = 0;
            for (uint32_t j = 0; j < VK_MAX_MEMORY_TYPES; ++j) {
                bool guestHas = typeBits[i] & (1 << j);
                uint32_t hostIndex =
                    hostVirt.memoryTypeIndexMappingToHost[j];
                bits |= guestHas ? (1 << hostIndex) : 0;
            }
            typeBits[i] = bits;
        }
    }

    void deviceMemoryTransform_fromhost(
        VkDeviceMemory* memory, uint32_t memoryCount,
        VkDeviceSize* offset, uint32_t offsetCount,
        VkDeviceSize* size, uint32_t sizeCount,
        uint32_t* typeIndex, uint32_t typeIndexCount,
        uint32_t* typeBits, uint32_t typeBitsCount) {

        (void)memoryCount;
        (void)offsetCount;
        (void)sizeCount;

        const auto& hostVirt =
            mHostVisibleMemoryVirtInfo;

        if (!hostVirt.virtualizationSupported) return;

        AutoLock lock (mLock);

        for (uint32_t i = 0; i < memoryCount; ++i) {
            // TODO
            (void)memory;
            (void)offset;
            (void)size;
        }

        for (uint32_t i = 0; i < typeIndexCount; ++i) {
            typeIndex[i] =
                hostVirt.memoryTypeIndexMappingFromHost[typeIndex[i]];
        }

        for (uint32_t i = 0; i < typeBitsCount; ++i) {
            uint32_t bits = 0;
            for (uint32_t j = 0; j < VK_MAX_MEMORY_TYPES; ++j) {
                bool hostHas = typeBits[i] & (1 << j);
                uint32_t guestIndex =
                    hostVirt.memoryTypeIndexMappingFromHost[j];
                bits |= hostHas ? (1 << guestIndex) : 0;

                if (hostVirt.memoryTypeBitsShouldAdvertiseBoth[j]) {
                    bits |= hostHas ? (1 << j) : 0;
                }
            }
            typeBits[i] = bits;
        }
    }
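
    // Informational example of the type remapping above: if guest memory type
    // index 2 maps to host index 5 (memoryTypeIndexMappingToHost[2] == 5),
    // then a guest typeIndex of 2 becomes 5 on the way to the host, and a
    // guest typeBits mask of 0b100 becomes 0b100000. The _fromhost direction
    // applies memoryTypeIndexMappingFromHost the same way, and may also keep
    // the original host bit set when memoryTypeBitsShouldAdvertiseBoth[j] is
    // true.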

    VkResult on_vkEnumerateInstanceExtensionProperties(
        void* context,
        VkResult,
        const char*,
        uint32_t* pPropertyCount,
        VkExtensionProperties* pProperties) {
        std::vector<const char*> allowedExtensionNames = {
            "VK_KHR_get_physical_device_properties2",
            "VK_KHR_sampler_ycbcr_conversion",
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            "VK_KHR_external_semaphore_capabilities",
            "VK_KHR_external_memory_capabilities",
#endif
            // TODO:
            // VK_KHR_external_memory_capabilities
        };

        VkEncoder* enc = (VkEncoder*)context;

        // Only advertise a select set of extensions.
        if (mHostInstanceExtensions.empty()) {
            uint32_t hostPropCount = 0;
            enc->vkEnumerateInstanceExtensionProperties(nullptr, &hostPropCount, nullptr);
            mHostInstanceExtensions.resize(hostPropCount);

            VkResult hostRes =
                enc->vkEnumerateInstanceExtensionProperties(
                    nullptr, &hostPropCount, mHostInstanceExtensions.data());

            if (hostRes != VK_SUCCESS) {
                return hostRes;
            }
        }

        std::vector<VkExtensionProperties> filteredExts;

        for (size_t i = 0; i < allowedExtensionNames.size(); ++i) {
            auto extIndex = getHostInstanceExtensionIndex(allowedExtensionNames[i]);
            if (extIndex != -1) {
                filteredExts.push_back(mHostInstanceExtensions[extIndex]);
            }
        }

        VkExtensionProperties anbExtProps[] = {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            { "VK_ANDROID_native_buffer", 7 },
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            { "VK_KHR_external_memory_capabilities", 1},
            { "VK_KHR_external_semaphore_capabilities", 1},
#endif
        };

        for (auto& anbExtProp: anbExtProps) {
            filteredExts.push_back(anbExtProp);
        }

        if (pPropertyCount) {
            *pPropertyCount = filteredExts.size();
        }

        if (pPropertyCount && pProperties) {
            for (size_t i = 0; i < *pPropertyCount; ++i) {
                pProperties[i] = filteredExts[i];
            }
        }

        return VK_SUCCESS;
    }

    VkResult on_vkEnumerateDeviceExtensionProperties(
        void* context,
        VkResult,
        VkPhysicalDevice physdev,
        const char*,
        uint32_t* pPropertyCount,
        VkExtensionProperties* pProperties) {

        std::vector<const char*> allowedExtensionNames = {
            "VK_KHR_maintenance1",
            "VK_KHR_get_memory_requirements2",
            "VK_KHR_dedicated_allocation",
            "VK_KHR_bind_memory2",
            "VK_KHR_sampler_ycbcr_conversion",
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            "VK_KHR_external_semaphore",
            "VK_KHR_external_semaphore_fd",
            // "VK_KHR_external_semaphore_win32", not exposed because it's translated to fd
            "VK_KHR_external_memory",
#endif
            // "VK_KHR_maintenance2",
            // "VK_KHR_maintenance3",
            // TODO:
            // VK_KHR_external_memory_capabilities
        };

        VkEncoder* enc = (VkEncoder*)context;

        if (mHostDeviceExtensions.empty()) {
            uint32_t hostPropCount = 0;
            enc->vkEnumerateDeviceExtensionProperties(physdev, nullptr, &hostPropCount, nullptr);
            mHostDeviceExtensions.resize(hostPropCount);

            VkResult hostRes =
                enc->vkEnumerateDeviceExtensionProperties(
                    physdev, nullptr, &hostPropCount, mHostDeviceExtensions.data());

            if (hostRes != VK_SUCCESS) {
                return hostRes;
            }
        }

        bool hostHasWin32ExternalSemaphore =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_semaphore_win32") != -1;

        bool hostHasPosixExternalSemaphore =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_semaphore_fd") != -1;

        ALOGD("%s: host has ext semaphore? win32 %d posix %d\n", __func__,
                hostHasWin32ExternalSemaphore,
                hostHasPosixExternalSemaphore);

        bool hostSupportsExternalSemaphore =
            hostHasWin32ExternalSemaphore ||
            hostHasPosixExternalSemaphore;

        std::vector<VkExtensionProperties> filteredExts;

        for (size_t i = 0; i < allowedExtensionNames.size(); ++i) {
            auto extIndex = getHostDeviceExtensionIndex(allowedExtensionNames[i]);
            if (extIndex != -1) {
                filteredExts.push_back(mHostDeviceExtensions[extIndex]);
            }
        }

        VkExtensionProperties anbExtProps[] = {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            { "VK_ANDROID_native_buffer", 7 },
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            { "VK_KHR_external_memory", 1 },
            { "VK_KHR_external_semaphore", 1 },
            { "VK_FUCHSIA_external_semaphore", 1 },
            { "VK_FUCHSIA_buffer_collection", 1 },
#endif
        };

        for (auto& anbExtProp: anbExtProps) {
            filteredExts.push_back(anbExtProp);
        }

        if (hostSupportsExternalSemaphore &&
            !hostHasPosixExternalSemaphore) {
            filteredExts.push_back(
                { "VK_KHR_external_semaphore_fd", 1});
        }

        bool win32ExtMemAvailable =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_memory_win32") != -1;
        bool posixExtMemAvailable =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_memory_fd") != -1;

        bool hostHasExternalMemorySupport =
            win32ExtMemAvailable || posixExtMemAvailable;

        if (hostHasExternalMemorySupport) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            filteredExts.push_back({
                "VK_ANDROID_external_memory_android_hardware_buffer", 7
            });
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            filteredExts.push_back({
                "VK_FUCHSIA_external_memory", 1
            });
#endif
        }

        if (pPropertyCount) {
            *pPropertyCount = filteredExts.size();
        }

        if (pPropertyCount && pProperties) {
            for (size_t i = 0; i < *pPropertyCount; ++i) {
                pProperties[i] = filteredExts[i];
            }
        }

        return VK_SUCCESS;
    }

    VkResult on_vkEnumeratePhysicalDevices(
        void* context, VkResult,
        VkInstance instance, uint32_t* pPhysicalDeviceCount,
        VkPhysicalDevice* pPhysicalDevices) {

        VkEncoder* enc = (VkEncoder*)context;

        if (!instance) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pPhysicalDeviceCount) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto it = info_VkInstance.find(instance);

        if (it == info_VkInstance.end()) return VK_ERROR_INITIALIZATION_FAILED;

        auto& info = it->second;

        if (info.physicalDevices.empty()) {
            uint32_t physdevCount = 0;

            lock.unlock();
            VkResult countRes = enc->vkEnumeratePhysicalDevices(
                instance, &physdevCount, nullptr);
            lock.lock();

            if (countRes != VK_SUCCESS) {
                ALOGE("%s: failed: could not count host physical devices. "
                      "Error %d\n", __func__, countRes);
                return countRes;
            }

            info.physicalDevices.resize(physdevCount);

            lock.unlock();
            VkResult enumRes = enc->vkEnumeratePhysicalDevices(
                instance, &physdevCount, info.physicalDevices.data());
            lock.lock();

            if (enumRes != VK_SUCCESS) {
                ALOGE("%s: failed: could not retrieve host physical devices. "
                      "Error %d\n", __func__, enumRes);
                return enumRes;
            }
        }

        *pPhysicalDeviceCount = (uint32_t)info.physicalDevices.size();

        if (pPhysicalDevices && *pPhysicalDeviceCount) {
            memcpy(pPhysicalDevices,
                   info.physicalDevices.data(),
                   sizeof(VkPhysicalDevice) *
                   info.physicalDevices.size());
        }

        return VK_SUCCESS;
    }

    void on_vkGetPhysicalDeviceMemoryProperties(
        void*,
        VkPhysicalDevice physdev,
        VkPhysicalDeviceMemoryProperties* out) {

        initHostVisibleMemoryVirtualizationInfo(
            physdev,
            out,
            mFeatureInfo->hasDirectMem,
            &mHostVisibleMemoryVirtInfo);

        if (mHostVisibleMemoryVirtInfo.virtualizationSupported) {
            *out = mHostVisibleMemoryVirtInfo.guestMemoryProperties;
        }
    }

    void on_vkGetPhysicalDeviceMemoryProperties2(
        void*,
        VkPhysicalDevice physdev,
        VkPhysicalDeviceMemoryProperties2* out) {

        initHostVisibleMemoryVirtualizationInfo(
            physdev,
            &out->memoryProperties,
            mFeatureInfo->hasDirectMem,
            &mHostVisibleMemoryVirtInfo);

        if (mHostVisibleMemoryVirtInfo.virtualizationSupported) {
            out->memoryProperties = mHostVisibleMemoryVirtInfo.guestMemoryProperties;
        }
    }

    VkResult on_vkCreateInstance(
        void* context,
        VkResult input_result,
        const VkInstanceCreateInfo* createInfo,
        const VkAllocationCallbacks*,
        VkInstance* pInstance) {

        if (input_result != VK_SUCCESS) return input_result;

        VkEncoder* enc = (VkEncoder*)context;

        uint32_t apiVersion;
        VkResult enumInstanceVersionRes =
            enc->vkEnumerateInstanceVersion(&apiVersion);

        setInstanceInfo(
            *pInstance,
            createInfo->enabledExtensionCount,
            createInfo->ppEnabledExtensionNames,
            apiVersion);

        return input_result;
    }

    VkResult on_vkCreateDevice(
        void* context,
        VkResult input_result,
        VkPhysicalDevice physicalDevice,
        const VkDeviceCreateInfo* pCreateInfo,
        const VkAllocationCallbacks*,
        VkDevice* pDevice) {

        if (input_result != VK_SUCCESS) return input_result;

        VkEncoder* enc = (VkEncoder*)context;

        VkPhysicalDeviceProperties props;
        VkPhysicalDeviceMemoryProperties memProps;
        enc->vkGetPhysicalDeviceProperties(physicalDevice, &props);
        enc->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);

        setDeviceInfo(
            *pDevice, physicalDevice, props, memProps,
            pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);

        return input_result;
    }

    void on_vkDestroyDevice_pre(
        void* context,
        VkDevice device,
        const VkAllocationCallbacks*) {

        AutoLock lock(mLock);

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return;
        auto info = it->second;

        lock.unlock();

        VkEncoder* enc = (VkEncoder*)context;

        for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
            for (auto& block : info.hostMemBlocks[i]) {
                destroyHostMemAlloc(enc, device, &block);
            }
        }

        if (info.fence != VK_NULL_HANDLE) {
            enc->vkDestroyFence(device, info.fence, nullptr);
        }
    }

    VkResult on_vkGetAndroidHardwareBufferPropertiesANDROID(
        void*, VkResult,
        VkDevice device,
        const AHardwareBuffer* buffer,
        VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
        return getAndroidHardwareBufferPropertiesANDROID(
            &mHostVisibleMemoryVirtInfo,
            device, buffer, pProperties);
    }

    VkResult on_vkGetMemoryAndroidHardwareBufferANDROID(
        void*, VkResult,
        VkDevice device,
        const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
        struct AHardwareBuffer** pBuffer) {

        if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
        if (!pInfo->memory) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto memoryIt = info_VkDeviceMemory.find(pInfo->memory);

        if (memoryIt == info_VkDeviceMemory.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = memoryIt->second;

        VkResult queryRes =
            getMemoryAndroidHardwareBufferANDROID(&info.ahw);

        if (queryRes != VK_SUCCESS) return queryRes;

        *pBuffer = info.ahw;

        return queryRes;
    }

#ifdef VK_USE_PLATFORM_FUCHSIA
    VkResult on_vkGetMemoryZirconHandleFUCHSIA(
        void*, VkResult,
        VkDevice device,
        const VkMemoryGetZirconHandleInfoFUCHSIA* pInfo,
        uint32_t* pHandle) {

        if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
        if (!pInfo->memory) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto memoryIt = info_VkDeviceMemory.find(pInfo->memory);

        if (memoryIt == info_VkDeviceMemory.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = memoryIt->second;

        if (info.vmoHandle == ZX_HANDLE_INVALID) {
            ALOGE("%s: memory cannot be exported", __func__);
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        *pHandle = ZX_HANDLE_INVALID;
        zx_handle_duplicate(info.vmoHandle, ZX_RIGHT_SAME_RIGHTS, pHandle);
        return VK_SUCCESS;
    }

    VkResult on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
        void*, VkResult,
        VkDevice device,
        VkExternalMemoryHandleTypeFlagBits handleType,
        uint32_t handle,
        VkMemoryZirconHandlePropertiesFUCHSIA* pProperties) {
        if (handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }
        if (pProperties->sType != VK_STRUCTURE_TYPE_TEMP_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = deviceIt->second;

        // Device local memory type supported.
        pProperties->memoryTypeBits = 0;
        for (uint32_t i = 0; i < info.memProps.memoryTypeCount; ++i) {
            if (info.memProps.memoryTypes[i].propertyFlags &
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                pProperties->memoryTypeBits |= 1ull << i;
            }
        }
        return VK_SUCCESS;
    }

    VkResult on_vkImportSemaphoreZirconHandleFUCHSIA(
        void*, VkResult,
        VkDevice device,
        const VkImportSemaphoreZirconHandleInfoFUCHSIA* pInfo) {

        if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
        if (!pInfo->semaphore) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto semaphoreIt = info_VkSemaphore.find(pInfo->semaphore);

        if (semaphoreIt == info_VkSemaphore.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = semaphoreIt->second;

        if (info.eventHandle != ZX_HANDLE_INVALID) {
            zx_handle_close(info.eventHandle);
        }
        info.eventHandle = pInfo->handle;

        return VK_SUCCESS;
    }

    VkResult on_vkGetSemaphoreZirconHandleFUCHSIA(
        void*, VkResult,
        VkDevice device,
        const VkSemaphoreGetZirconHandleInfoFUCHSIA* pInfo,
        uint32_t* pHandle) {

        if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
        if (!pInfo->semaphore) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto semaphoreIt = info_VkSemaphore.find(pInfo->semaphore);

        if (semaphoreIt == info_VkSemaphore.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = semaphoreIt->second;

        if (info.eventHandle == ZX_HANDLE_INVALID) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        *pHandle = ZX_HANDLE_INVALID;
        zx_handle_duplicate(info.eventHandle, ZX_RIGHT_SAME_RIGHTS, pHandle);
        return VK_SUCCESS;
    }

    VkResult on_vkCreateBufferCollectionFUCHSIA(
        void*, VkResult, VkDevice,
        const VkBufferCollectionCreateInfoFUCHSIA* pInfo,
        const VkAllocationCallbacks*,
        VkBufferCollectionFUCHSIA* pCollection) {
        fuchsia::sysmem::BufferCollectionTokenSyncPtr token;
        if (pInfo->collectionToken) {
            token.Bind(zx::channel(pInfo->collectionToken));
        } else {
            zx_status_t status = mSysmemAllocator->AllocateSharedCollection(token.NewRequest());
            if (status != ZX_OK) {
                ALOGE("AllocateSharedCollection failed: %d", status);
                return VK_ERROR_INITIALIZATION_FAILED;
            }
        }
        auto sysmem_collection = new fuchsia::sysmem::BufferCollectionSyncPtr;
        zx_status_t status = mSysmemAllocator->BindSharedCollection(
            std::move(token), sysmem_collection->NewRequest());
        if (status != ZX_OK) {
            ALOGE("BindSharedCollection failed: %d", status);
            return VK_ERROR_INITIALIZATION_FAILED;
        }
        *pCollection = reinterpret_cast<VkBufferCollectionFUCHSIA>(sysmem_collection);
        return VK_SUCCESS;
    }

    void on_vkDestroyBufferCollectionFUCHSIA(
        void*, VkResult, VkDevice,
        VkBufferCollectionFUCHSIA collection,
        const VkAllocationCallbacks*) {
        auto sysmem_collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(collection);
        if (sysmem_collection->is_bound()) {
            (*sysmem_collection)->Close();
        }
        delete sysmem_collection;
    }

    void setBufferCollectionConstraints(fuchsia::sysmem::BufferCollectionSyncPtr* collection,
                                        const VkImageCreateInfo* pImageInfo,
                                        size_t min_size_bytes) {
        fuchsia::sysmem::BufferCollectionConstraints constraints = {};
        constraints.usage.vulkan = fuchsia::sysmem::vulkanUsageColorAttachment |
                                   fuchsia::sysmem::vulkanUsageTransferSrc |
                                   fuchsia::sysmem::vulkanUsageTransferDst |
                                   fuchsia::sysmem::vulkanUsageSampled;
        constraints.min_buffer_count_for_camping = 1;
        constraints.has_buffer_memory_constraints = true;
        fuchsia::sysmem::BufferMemoryConstraints& buffer_constraints =
            constraints.buffer_memory_constraints;
        buffer_constraints.min_size_bytes = min_size_bytes;
        buffer_constraints.max_size_bytes = 0xffffffff;
        buffer_constraints.physically_contiguous_required = false;
        buffer_constraints.secure_required = false;
        buffer_constraints.secure_permitted = false;
        buffer_constraints.ram_domain_supported = false;
        buffer_constraints.cpu_domain_supported = false;
        buffer_constraints.gpu_domain_supported = true;
        constraints.image_format_constraints_count = 1;
        fuchsia::sysmem::ImageFormatConstraints& image_constraints =
            constraints.image_format_constraints[0];
        image_constraints.pixel_format.type = fuchsia::sysmem::PixelFormatType::BGRA32;
        image_constraints.color_spaces_count = 1;
        image_constraints.color_space[0].type = fuchsia::sysmem::ColorSpaceType::SRGB;
        image_constraints.min_coded_width = pImageInfo->extent.width;
        image_constraints.max_coded_width = 0xfffffff;
        image_constraints.min_coded_height = pImageInfo->extent.height;
        image_constraints.max_coded_height = 0xffffffff;
        image_constraints.min_bytes_per_row = pImageInfo->extent.width * 4;
        image_constraints.max_bytes_per_row = 0xffffffff;
        image_constraints.max_coded_width_times_coded_height = 0xffffffff;
        image_constraints.layers = 1;
        image_constraints.coded_width_divisor = 1;
        image_constraints.coded_height_divisor = 1;
        image_constraints.bytes_per_row_divisor = 1;
        image_constraints.start_offset_divisor = 1;
        image_constraints.display_width_divisor = 1;
        image_constraints.display_height_divisor = 1;

        (*collection)->SetConstraints(true, constraints);
    }
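
    // Usage note (informational): the only in-file caller below,
    // on_vkSetBufferCollectionConstraintsFUCHSIA, passes
    // width * height * 4 as min_size_bytes, matching the 4 bytes per pixel
    // of the BGRA32 pixel format requested in these constraints.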
1335 
on_vkSetBufferCollectionConstraintsFUCHSIA(void *,VkResult,VkDevice,VkBufferCollectionFUCHSIA collection,const VkImageCreateInfo * pImageInfo)1336     VkResult on_vkSetBufferCollectionConstraintsFUCHSIA(
1337         void*, VkResult, VkDevice,
1338         VkBufferCollectionFUCHSIA collection,
1339         const VkImageCreateInfo* pImageInfo) {
1340         auto sysmem_collection =
1341             reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(collection);
1342         setBufferCollectionConstraints(
1343             sysmem_collection, pImageInfo,
1344             pImageInfo->extent.width * pImageInfo->extent.height * 4);
1345         return VK_SUCCESS;
1346     }
1347 
on_vkGetBufferCollectionPropertiesFUCHSIA(void *,VkResult,VkDevice device,VkBufferCollectionFUCHSIA collection,VkBufferCollectionPropertiesFUCHSIA * pProperties)1348     VkResult on_vkGetBufferCollectionPropertiesFUCHSIA(
1349         void*, VkResult,
1350         VkDevice device,
1351         VkBufferCollectionFUCHSIA collection,
1352         VkBufferCollectionPropertiesFUCHSIA* pProperties) {
1353         auto sysmem_collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(collection);
1354         fuchsia::sysmem::BufferCollectionInfo_2 info;
1355         zx_status_t status2;
1356         zx_status_t status = (*sysmem_collection)->WaitForBuffersAllocated(&status2, &info);
1357         if (status != ZX_OK || status2 != ZX_OK) {
1358             ALOGE("Failed wait for allocation: %d %d", status, status2);
1359             return VK_ERROR_INITIALIZATION_FAILED;
1360         }
1361         if (!info.settings.has_image_format_constraints) {
1362             return VK_ERROR_INITIALIZATION_FAILED;
1363         }
1364         pProperties->count = info.buffer_count;
1365 
1366         AutoLock lock(mLock);
1367 
1368         auto deviceIt = info_VkDevice.find(device);
1369 
1370         if (deviceIt == info_VkDevice.end()) {
1371             return VK_ERROR_INITIALIZATION_FAILED;
1372         }
1373 
1374         auto& deviceInfo = deviceIt->second;
1375 
1376         // Device local memory type supported.
1377         pProperties->memoryTypeBits = 0;
1378         for (uint32_t i = 0; i < deviceInfo.memProps.memoryTypeCount; ++i) {
1379             if (deviceInfo.memProps.memoryTypes[i].propertyFlags &
1380                 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
1381                 pProperties->memoryTypeBits |= 1ull << i;
1382             }
1383         }
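        // Hypothetical example of the loop above: if memory types 0 and 2
        // advertise VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, memoryTypeBits ends
        // up as (1 << 0) | (1 << 2) = 0x5.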
1384         return VK_SUCCESS;
1385     }
1386 #endif
1387 
getOrAllocateHostMemBlockLocked(HostMemBlocks & blocks,const VkMemoryAllocateInfo * pAllocateInfo,VkEncoder * enc,VkDevice device,const VkDevice_Info & deviceInfo)1388     HostMemBlockIndex getOrAllocateHostMemBlockLocked(
1389         HostMemBlocks& blocks,
1390         const VkMemoryAllocateInfo* pAllocateInfo,
1391         VkEncoder* enc,
1392         VkDevice device,
1393         const VkDevice_Info& deviceInfo) {
1394 
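        // Rough shape of the algorithm below (descriptive comment, not from
        // the original source): scan the existing blocks for one that was
        // initialized successfully and can sub-allocate the requested size;
        // if none fits, allocate a fresh host block (16 MB by default, or
        // larger if the request demands it), map it into the guest address
        // space, and retry the scan on the next loop iteration.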
1395         HostMemBlockIndex res = 0;
1396         bool found = false;
1397 
1398         while (!found) {
1399             for (HostMemBlockIndex i = 0; i < blocks.size(); ++i) {
1400                 if (blocks[i].initialized &&
1401                     blocks[i].initResult == VK_SUCCESS &&
1402                     canSubAlloc(
1403                         blocks[i].subAlloc,
1404                         pAllocateInfo->allocationSize)) {
1405                     res = i;
1406                     found = true;
1407                     return res;
1408                 }
1409             }
1410 
1411             blocks.push_back({});
1412 
1413             auto& hostMemAlloc = blocks.back();
1414 
1415             // Uninitialized block; allocate on host.
1416             static constexpr VkDeviceSize oneMb = 1048576;
1417             static constexpr VkDeviceSize kDefaultHostMemBlockSize =
1418                 16 * oneMb; // 16 mb
1419             VkDeviceSize roundedUpAllocSize =
1420                 oneMb * ((pAllocateInfo->allocationSize + oneMb - 1) / oneMb);
1421 
1422             VkDeviceSize virtualHeapSize = VIRTUAL_HOST_VISIBLE_HEAP_SIZE;
1423 
1424             VkDeviceSize blockSizeNeeded =
1425                 std::max(roundedUpAllocSize,
1426                     std::min(virtualHeapSize,
1427                              kDefaultHostMemBlockSize));
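            // Worked example with hypothetical numbers: for
            // allocationSize = 5 MB + 1 byte, roundedUpAllocSize becomes
            // 6 * oneMb, and blockSizeNeeded is
            // max(6 MB, min(VIRTUAL_HOST_VISIBLE_HEAP_SIZE, 16 MB)) = 16 MB
            // whenever the virtual heap is at least 16 MB.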
1428 
1429             VkMemoryAllocateInfo allocInfoForHost = *pAllocateInfo;
1430 
1431             allocInfoForHost.allocationSize = blockSizeNeeded;
1432 
1433             // TODO: Support dedicated/external host visible allocation
1434             allocInfoForHost.pNext = nullptr;
1435 
1436             mLock.unlock();
1437             VkResult host_res =
1438                 enc->vkAllocateMemory(
1439                     device,
1440                     &allocInfoForHost,
1441                     nullptr,
1442                     &hostMemAlloc.memory);
1443             mLock.lock();
1444 
1445             if (host_res != VK_SUCCESS) {
1446                 ALOGE("Could not allocate backing for virtual host visible memory: %d",
1447                       host_res);
1448                 hostMemAlloc.initialized = true;
1449                 hostMemAlloc.initResult = host_res;
1450                 return INVALID_HOST_MEM_BLOCK;
1451             }
1452 
1453             auto& hostMemInfo = info_VkDeviceMemory[hostMemAlloc.memory];
1454             hostMemInfo.allocationSize = allocInfoForHost.allocationSize;
1455             VkDeviceSize nonCoherentAtomSize =
1456                 deviceInfo.props.limits.nonCoherentAtomSize;
1457             hostMemInfo.mappedSize = hostMemInfo.allocationSize;
1458             hostMemInfo.memoryTypeIndex =
1459                 pAllocateInfo->memoryTypeIndex;
1460             hostMemAlloc.nonCoherentAtomSize = nonCoherentAtomSize;
1461 
1462             uint64_t directMappedAddr = 0;
1463 
1464             mLock.unlock();
1465             VkResult directMapResult =
1466                 enc->vkMapMemoryIntoAddressSpaceGOOGLE(
1467                     device, hostMemAlloc.memory, &directMappedAddr);
1468             mLock.lock();
1469 
1470             if (directMapResult != VK_SUCCESS) {
1471                 hostMemAlloc.initialized = true;
1472                 hostMemAlloc.initResult = directMapResult;
1473                 mLock.unlock();
1474                 enc->vkFreeMemory(device, hostMemAlloc.memory, nullptr);
1475                 mLock.lock();
1476                 return INVALID_HOST_MEM_BLOCK;
1477             }
1478 
1479             hostMemInfo.mappedPtr =
1480                 (uint8_t*)(uintptr_t)directMappedAddr;
1481             hostMemInfo.virtualHostVisibleBacking = true;
1482 
1483             VkResult hostMemAllocRes =
1484                 finishHostMemAllocInit(
1485                     enc,
1486                     device,
1487                     pAllocateInfo->memoryTypeIndex,
1488                     nonCoherentAtomSize,
1489                     hostMemInfo.allocationSize,
1490                     hostMemInfo.mappedSize,
1491                     hostMemInfo.mappedPtr,
1492                     &hostMemAlloc);
1493 
1494             if (hostMemAllocRes != VK_SUCCESS) {
1495                 return INVALID_HOST_MEM_BLOCK;
1496             }
1497         }
1498 
1499         // unreachable, but we need to make -Werror happy
1500         return INVALID_HOST_MEM_BLOCK;
1501     }
1502 
on_vkAllocateMemory(void * context,VkResult input_result,VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)1503     VkResult on_vkAllocateMemory(
1504         void* context,
1505         VkResult input_result,
1506         VkDevice device,
1507         const VkMemoryAllocateInfo* pAllocateInfo,
1508         const VkAllocationCallbacks* pAllocator,
1509         VkDeviceMemory* pMemory) {
1510 
1511         if (input_result != VK_SUCCESS) return input_result;
1512 
1513         VkEncoder* enc = (VkEncoder*)context;
1514 
1515         VkMemoryAllocateInfo finalAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
1516         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&finalAllocInfo);
1517 
1518         VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
1519         VkImportColorBufferGOOGLE importCbInfo = {
1520             VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE, 0,
1521         };
1522         // VkImportPhysicalAddressGOOGLE importPhysAddrInfo = {
1523         //     VK_STRUCTURE_TYPE_IMPORT_PHYSICAL_ADDRESS_GOOGLE, 0,
1524         // };
1525 
1526         const VkExportMemoryAllocateInfo* exportAllocateInfoPtr =
1527             vk_find_struct<VkExportMemoryAllocateInfo>(pAllocateInfo);
1528 
1529         const VkImportAndroidHardwareBufferInfoANDROID* importAhbInfoPtr =
1530             vk_find_struct<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo);
1531 
1532         const VkImportMemoryBufferCollectionFUCHSIA* importBufferCollectionInfoPtr =
1533             vk_find_struct<VkImportMemoryBufferCollectionFUCHSIA>(pAllocateInfo);
1534 
1535         const VkImportMemoryZirconHandleInfoFUCHSIA* importVmoInfoPtr =
1536             vk_find_struct<VkImportMemoryZirconHandleInfoFUCHSIA>(pAllocateInfo);
1537 
1538         const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
1539             vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
1540 
1541         bool shouldPassThroughDedicatedAllocInfo =
1542             !exportAllocateInfoPtr &&
1543             !importAhbInfoPtr &&
1544             !importBufferCollectionInfoPtr &&
1545             !importVmoInfoPtr &&
1546             !isHostVisibleMemoryTypeIndexForGuest(
1547                 &mHostVisibleMemoryVirtInfo,
1548                 pAllocateInfo->memoryTypeIndex);
1549 
1550         if (!exportAllocateInfoPtr &&
1551             (importAhbInfoPtr || importBufferCollectionInfoPtr || importVmoInfoPtr) &&
1552             dedicatedAllocInfoPtr &&
1553             isHostVisibleMemoryTypeIndexForGuest(
1554                 &mHostVisibleMemoryVirtInfo,
1555                 pAllocateInfo->memoryTypeIndex)) {
1556             ALOGE("FATAL: It is not yet supported to import-allocate "
1557                   "external memory that is both host visible and dedicated.");
1558             abort();
1559         }
1560 
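        // Descriptive note: the dedicated-allocation info is forwarded to the
        // host only when the allocation is neither exported, imported, nor
        // backed by virtualized host-visible memory; in those cases the
        // import/virtualization paths below take over instead.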
1561         if (shouldPassThroughDedicatedAllocInfo &&
1562             dedicatedAllocInfoPtr) {
1563             dedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
1564             vk_append_struct(&structChainIter, &dedicatedAllocInfo);
1565         }
1566 
1567         // State needed for import/export.
1568         bool exportAhb = false;
1569         bool exportVmo = false;
1570         bool importAhb = false;
1571         bool importBufferCollection = false;
1572         bool importVmo = false;
1573         (void)exportVmo;
1574 
1575         // Even if we export allocate, the underlying operation
1576         // for the host is always going to be an import operation.
1577         // This is also how Intel's implementation works,
1578         // and is generally simpler;
1579         // even in an export allocation,
1580         // we perform AHardwareBuffer allocation
1581         // on the guest side, at this layer,
1582         // and then we attach a new VkDeviceMemory
1583         // to the AHardwareBuffer on the host via an "import" operation.
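        // Sketch of that export path, with hypothetical values
        // (illustration only, not additional driver logic):
        //   1. createAndroidHardwareBuffer(...) allocates an AHardwareBuffer
        //      in the guest.
        //   2. AHardwareBuffer_getNativeHandle(ahw) yields a cb_handle_t
        //      whose hostHandle names the host-side color buffer.
        //   3. That handle is chained into the allocation via
        //      VkImportColorBufferGOOGLE, so the host "imports" it even
        //      though the app asked for an export allocation.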
1584         AHardwareBuffer* ahw = nullptr;
1585 
1586         if (exportAllocateInfoPtr) {
1587             exportAhb =
1588                 exportAllocateInfoPtr->handleTypes &
1589                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1590             exportVmo =
1591                 exportAllocateInfoPtr->handleTypes &
1592                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA;
1593         } else if (importAhbInfoPtr) {
1594             importAhb = true;
1595         } else if (importBufferCollectionInfoPtr) {
1596             importBufferCollection = true;
1597         } else if (importVmoInfoPtr) {
1598             importVmo = true;
1599         }
1600 
1601         if (exportAhb) {
1602             bool hasDedicatedImage = dedicatedAllocInfoPtr &&
1603                 (dedicatedAllocInfoPtr->image != VK_NULL_HANDLE);
1604             bool hasDedicatedBuffer = dedicatedAllocInfoPtr &&
1605                 (dedicatedAllocInfoPtr->buffer != VK_NULL_HANDLE);
1606             VkExtent3D imageExtent = { 0, 0, 0 };
1607             uint32_t imageLayers = 0;
1608             VkFormat imageFormat = VK_FORMAT_UNDEFINED;
1609             VkImageUsageFlags imageUsage = 0;
1610             VkImageCreateFlags imageCreateFlags = 0;
1611             VkDeviceSize bufferSize = 0;
1612             VkDeviceSize allocationInfoAllocSize =
1613                 finalAllocInfo.allocationSize;
1614 
1615             if (hasDedicatedImage) {
1616                 AutoLock lock(mLock);
1617 
1618                 auto it = info_VkImage.find(
1619                     dedicatedAllocInfoPtr->image);
1620                 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
1621                 const auto& info = it->second;
1622                 const auto& imgCi = info.createInfo;
1623 
1624                 imageExtent = imgCi.extent;
1625                 imageLayers = imgCi.arrayLayers;
1626                 imageFormat = imgCi.format;
1627                 imageUsage = imgCi.usage;
1628                 imageCreateFlags = imgCi.flags;
1629             }
1630 
1631             if (hasDedicatedBuffer) {
1632                 AutoLock lock(mLock);
1633 
1634                 auto it = info_VkBuffer.find(
1635                     dedicatedAllocInfoPtr->buffer);
1636                 if (it == info_VkBuffer.end()) return VK_ERROR_INITIALIZATION_FAILED;
1637                 const auto& info = it->second;
1638                 const auto& bufCi = info.createInfo;
1639 
1640                 bufferSize = bufCi.size;
1641             }
1642 
1643             VkResult ahbCreateRes =
1644                 createAndroidHardwareBuffer(
1645                     hasDedicatedImage,
1646                     hasDedicatedBuffer,
1647                     imageExtent,
1648                     imageLayers,
1649                     imageFormat,
1650                     imageUsage,
1651                     imageCreateFlags,
1652                     bufferSize,
1653                     allocationInfoAllocSize,
1654                     &ahw);
1655 
1656             if (ahbCreateRes != VK_SUCCESS) {
1657                 return ahbCreateRes;
1658             }
1659         }
1660 
1661         if (importAhb) {
1662             ahw = importAhbInfoPtr->buffer;
1663             // We still need to acquire the AHardwareBuffer.
1664             importAndroidHardwareBuffer(
1665                 importAhbInfoPtr, nullptr);
1666         }
1667 
1668         if (ahw) {
1669             ALOGD("%s: Import AHardwareBuffer", __func__);
1670             const native_handle_t *handle =
1671                 AHardwareBuffer_getNativeHandle(ahw);
1672             const cb_handle_t* cb_handle =
1673                 reinterpret_cast<const cb_handle_t*>(handle);
1674             importCbInfo.colorBuffer = cb_handle->hostHandle;
1675             vk_append_struct(&structChainIter, &importCbInfo);
1676         }
1677 
1678         zx_handle_t vmo_handle = ZX_HANDLE_INVALID;
1679 
1680         if (importBufferCollection) {
1681 
1682 #ifdef VK_USE_PLATFORM_FUCHSIA
1683             auto collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(
1684                 importBufferCollectionInfoPtr->collection);
1685             fuchsia::sysmem::BufferCollectionInfo_2 info;
1686             zx_status_t status2;
1687             zx_status_t status = (*collection)->WaitForBuffersAllocated(&status2, &info);
1688             if (status != ZX_OK || status2 != ZX_OK) {
1689                 ALOGE("WaitForBuffersAllocated failed: %d %d", status, status2);
1690                 return VK_ERROR_INITIALIZATION_FAILED;
1691             }
1692             uint32_t index = importBufferCollectionInfoPtr->index;
1693             if (index >= info.buffer_count) {
1694                 ALOGE("Invalid buffer index: %d", index);
1695                 return VK_ERROR_INITIALIZATION_FAILED;
1696             }
1697             vmo_handle = info.buffers[index].vmo.release();
1698 #endif
1699 
1700         }
1701 
1702         if (importVmo) {
1703             vmo_handle = importVmoInfoPtr->handle;
1704         }
1705 
1706 #ifdef VK_USE_PLATFORM_FUCHSIA
1707         if (vmo_handle == ZX_HANDLE_INVALID &&
1708             !isHostVisibleMemoryTypeIndexForGuest(
1709                 &mHostVisibleMemoryVirtInfo, finalAllocInfo.memoryTypeIndex)) {
1710             bool hasDedicatedImage = dedicatedAllocInfoPtr &&
1711                 (dedicatedAllocInfoPtr->image != VK_NULL_HANDLE);
1712             VkImageCreateInfo imageCreateInfo = {};
1713 
1714             if (hasDedicatedImage) {
1715                 AutoLock lock(mLock);
1716 
1717                 auto it = info_VkImage.find(dedicatedAllocInfoPtr->image);
1718                 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
1719                 const auto& imageInfo = it->second;
1720 
1721                 imageCreateInfo = imageInfo.createInfo;
1722             }
1723 
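            // Descriptive note: when no VMO was imported and the memory type
            // is not virtualized host-visible, color-attachment images still
            // need host-side backing, so the code below allocates a sysmem
            // buffer collection on the fly and registers its VMO as a
            // color buffer with the goldfish control device.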
1724             if (imageCreateInfo.usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
1725                 fuchsia::sysmem::BufferCollectionTokenSyncPtr token;
1726                 zx_status_t status = mSysmemAllocator->AllocateSharedCollection(
1727                     token.NewRequest());
1728                 if (status != ZX_OK) {
1729                     ALOGE("AllocateSharedCollection failed: %d", status);
1730                     abort();
1731                 }
1732 
1733                 fuchsia::sysmem::BufferCollectionSyncPtr collection;
1734                 status = mSysmemAllocator->BindSharedCollection(
1735                     std::move(token), collection.NewRequest());
1736                 if (status != ZX_OK) {
1737                     ALOGE("BindSharedCollection failed: %d", status);
1738                     abort();
1739                 }
1740                 setBufferCollectionConstraints(&collection,
1741                                                &imageCreateInfo,
1742                                                finalAllocInfo.allocationSize);
1743 
1744                 fuchsia::sysmem::BufferCollectionInfo_2 info;
1745                 zx_status_t status2;
1746                 status = collection->WaitForBuffersAllocated(&status2, &info);
1747                 if (status == ZX_OK && status2 == ZX_OK) {
1748                     if (!info.buffer_count) {
1749                       ALOGE("WaitForBuffersAllocated returned invalid buffer_count: %d", info.buffer_count);
1750                       abort();
1751                     }
1752                     vmo_handle = info.buffers[0].vmo.release();
1753                 } else {
1754                     ALOGE("WaitForBuffersAllocated failed: %d %d", status, status2);
1755                     abort();
1756                 }
1757 
1758                 collection->Close();
1759 
1760                 zx_handle_t vmo_copy;
1761                 status = zx_handle_duplicate(vmo_handle, ZX_RIGHT_SAME_RIGHTS, &vmo_copy);
1762                 if (status != ZX_OK) {
1763                     ALOGE("Failed to duplicate VMO: %d", status);
1764                     abort();
1765                 }
1766                 status = fuchsia_hardware_goldfish_control_DeviceCreateColorBuffer(
1767                     mControlDevice,
1768                     vmo_copy,
1769                     imageCreateInfo.extent.width,
1770                     imageCreateInfo.extent.height,
1771                     fuchsia_hardware_goldfish_control_FormatType_BGRA,
1772                     &status2);
1773                 if (status != ZX_OK || status2 != ZX_OK) {
1774                     ALOGE("CreateColorBuffer failed: %d:%d", status, status2);
1775                     abort();
1776                 }
1777             }
1778         }
1779 
1780         if (vmo_handle != ZX_HANDLE_INVALID) {
1781             zx_handle_t vmo_copy;
1782             zx_status_t status = zx_handle_duplicate(vmo_handle,
1783                                                      ZX_RIGHT_SAME_RIGHTS,
1784                                                      &vmo_copy);
1785             if (status != ZX_OK) {
1786                 ALOGE("Failed to duplicate VMO: %d", status);
1787                 abort();
1788             }
1789             zx_status_t status2 = ZX_OK;
1790             status = fuchsia_hardware_goldfish_control_DeviceGetColorBuffer(
1791                 mControlDevice, vmo_copy, &status2, &importCbInfo.colorBuffer);
1792             if (status != ZX_OK || status2 != ZX_OK) {
1793                 ALOGE("GetColorBuffer failed: %d:%d", status, status2);
1794             }
1795             vk_append_struct(&structChainIter, &importCbInfo);
1796         }
1797 #endif
1798 
1799         // TODO if (exportVmo) { }
1800 
1801         if (!isHostVisibleMemoryTypeIndexForGuest(
1802                 &mHostVisibleMemoryVirtInfo,
1803                 finalAllocInfo.memoryTypeIndex)) {
1804             input_result =
1805                 enc->vkAllocateMemory(
1806                     device, &finalAllocInfo, pAllocator, pMemory);
1807 
1808             if (input_result != VK_SUCCESS) return input_result;
1809 
1810             VkDeviceSize allocationSize = finalAllocInfo.allocationSize;
1811             setDeviceMemoryInfo(
1812                 device, *pMemory,
1813                 finalAllocInfo.allocationSize,
1814                 0, nullptr,
1815                 finalAllocInfo.memoryTypeIndex,
1816                 ahw,
1817                 vmo_handle);
1818 
1819             return VK_SUCCESS;
1820         }
1821 
1822         // Done with device-local memory. What follows handles
1823         // host-visible memory.
1824 
1825         if (ahw) {
1826             ALOGE("%s: Host visible export/import allocation "
1827                   "of Android hardware buffers is not supported.",
1828                   __func__);
1829             abort();
1830         }
1831 
1832         if (vmo_handle != ZX_HANDLE_INVALID) {
1833             ALOGE("%s: Host visible export/import allocation "
1834                   "of VMO is not supported yet.",
1835                   __func__);
1836             abort();
1837         }
1838 
1839         // Host visible memory, non external
1840         bool directMappingSupported = usingDirectMapping();
1841         if (!directMappingSupported) {
1842             input_result =
1843                 enc->vkAllocateMemory(
1844                     device, &finalAllocInfo, pAllocator, pMemory);
1845 
1846             if (input_result != VK_SUCCESS) return input_result;
1847 
1848             VkDeviceSize mappedSize =
1849                 getNonCoherentExtendedSize(device,
1850                     finalAllocInfo.allocationSize);
1851             uint8_t* mappedPtr = (uint8_t*)aligned_buf_alloc(4096, mappedSize);
1852             D("host visible alloc (non-direct): "
1853               "size 0x%llx host ptr %p mapped size 0x%llx",
1854               (unsigned long long)finalAllocInfo.allocationSize, mappedPtr,
1855               (unsigned long long)mappedSize);
1856             setDeviceMemoryInfo(
1857                 device, *pMemory,
1858                 finalAllocInfo.allocationSize,
1859                 mappedSize, mappedPtr,
1860                 finalAllocInfo.memoryTypeIndex);
1861             return VK_SUCCESS;
1862         }
1863 
1864         // Host visible memory with direct mapping via
1865         // VkImportPhysicalAddressGOOGLE
1866         // if (importPhysAddr) {
1867             // vkAllocateMemory(device, &finalAllocInfo, pAllocator, pMemory);
1868             //    host maps the host pointer to the guest physical address
1869             // TODO: the host side page offset of the
1870             // host pointer needs to be returned somehow.
1871         // }
1872 
1873         // Host visible memory with direct mapping
1874         AutoLock lock(mLock);
1875 
1876         auto it = info_VkDevice.find(device);
1877         if (it == info_VkDevice.end()) return VK_ERROR_DEVICE_LOST;
1878         auto& deviceInfo = it->second;
1879 
1880         auto& hostMemBlocksForTypeIndex =
1881             deviceInfo.hostMemBlocks[finalAllocInfo.memoryTypeIndex];
1882 
1883         HostMemBlockIndex blockIndex =
1884             getOrAllocateHostMemBlockLocked(
1885                 hostMemBlocksForTypeIndex,
1886                 &finalAllocInfo,
1887                 enc,
1888                 device,
1889                 deviceInfo);
1890 
1891         if (blockIndex == (HostMemBlockIndex) INVALID_HOST_MEM_BLOCK) {
1892             return VK_ERROR_OUT_OF_HOST_MEMORY;
1893         }
1894 
1895         VkDeviceMemory_Info virtualMemInfo;
1896 
1897         subAllocHostMemory(
1898             &hostMemBlocksForTypeIndex[blockIndex],
1899             &finalAllocInfo,
1900             &virtualMemInfo.subAlloc);
1901 
1902         virtualMemInfo.allocationSize = virtualMemInfo.subAlloc.subAllocSize;
1903         virtualMemInfo.mappedSize = virtualMemInfo.subAlloc.subMappedSize;
1904         virtualMemInfo.mappedPtr = virtualMemInfo.subAlloc.mappedPtr;
1905         virtualMemInfo.memoryTypeIndex = finalAllocInfo.memoryTypeIndex;
1906         virtualMemInfo.directMapped = true;
1907 
1908         D("host visible alloc (direct, suballoc): "
1909           "size 0x%llx ptr %p mapped size 0x%llx",
1910           (unsigned long long)virtualMemInfo.allocationSize, virtualMemInfo.mappedPtr,
1911           (unsigned long long)virtualMemInfo.mappedSize);
1912 
1913         info_VkDeviceMemory[
1914             virtualMemInfo.subAlloc.subMemory] = virtualMemInfo;
1915 
1916         *pMemory = virtualMemInfo.subAlloc.subMemory;
1917 
1918         return VK_SUCCESS;
1919     }
1920 
on_vkFreeMemory(void * context,VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)1921     void on_vkFreeMemory(
1922         void* context,
1923         VkDevice device,
1924         VkDeviceMemory memory,
1925         const VkAllocationCallbacks* pAllocator) {
1926 
1927         AutoLock lock(mLock);
1928 
1929         auto it = info_VkDeviceMemory.find(memory);
1930         if (it == info_VkDeviceMemory.end()) return;
1931         auto& info = it->second;
1932 
1933         if (!info.directMapped) {
1934             lock.unlock();
1935             VkEncoder* enc = (VkEncoder*)context;
1936             enc->vkFreeMemory(device, memory, pAllocator);
1937             return;
1938         }
1939 
1940         subFreeHostMemory(&info.subAlloc);
1941     }
1942 
on_vkMapMemory(void *,VkResult host_result,VkDevice,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags,void ** ppData)1943     VkResult on_vkMapMemory(
1944         void*,
1945         VkResult host_result,
1946         VkDevice,
1947         VkDeviceMemory memory,
1948         VkDeviceSize offset,
1949         VkDeviceSize size,
1950         VkMemoryMapFlags,
1951         void** ppData) {
1952 
1953         if (host_result != VK_SUCCESS) return host_result;
1954 
1955         AutoLock lock(mLock);
1956 
1957         auto it = info_VkDeviceMemory.find(memory);
1958         if (it == info_VkDeviceMemory.end()) return VK_ERROR_MEMORY_MAP_FAILED;
1959 
1960         auto& info = it->second;
1961 
1962         if (!info.mappedPtr) return VK_ERROR_MEMORY_MAP_FAILED;
1963 
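        // Reject ranges that would run past the end of the allocation;
        // VK_WHOLE_SIZE skips the check, since the spec sizes the mapping
        // from offset to the end of the allocation in that case.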
1964         if (size != VK_WHOLE_SIZE &&
1965             (offset + size > info.allocationSize)) {
1966             return VK_ERROR_MEMORY_MAP_FAILED;
1967         }
1968 
1969         *ppData = info.mappedPtr + offset;
1970 
1971         return host_result;
1972     }
1973 
on_vkUnmapMemory(void *,VkDevice,VkDeviceMemory)1974     void on_vkUnmapMemory(
1975         void*,
1976         VkDevice,
1977         VkDeviceMemory) {
1978         // no-op
1979     }
1980 
transformNonExternalResourceMemoryTypeBitsForGuest(uint32_t hostBits)1981     uint32_t transformNonExternalResourceMemoryTypeBitsForGuest(
1982         uint32_t hostBits) {
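        // Hypothetical example (illustration only): with hostBits = 0b1011
        // and memory type 1 reserved as a "no flags" guest-only type, the
        // loop below keeps bits 0 and 3 and drops bit 1, giving 0b1001.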
1983         uint32_t res = 0;
1984         for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
1985             if (isNoFlagsMemoryTypeIndexForGuest(
1986                     &mHostVisibleMemoryVirtInfo, i)) continue;
1987             if (hostBits & (1 << i)) {
1988                 res |= (1 << i);
1989             }
1990         }
1991         return res;
1992     }
1993 
transformExternalResourceMemoryTypeBitsForGuest(uint32_t normalBits)1994     uint32_t transformExternalResourceMemoryTypeBitsForGuest(
1995         uint32_t normalBits) {
1996         uint32_t res = 0;
1997         for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
1998             if (isNoFlagsMemoryTypeIndexForGuest(
1999                     &mHostVisibleMemoryVirtInfo, i)) continue;
2000             if (normalBits & (1 << i) &&
2001                 !isHostVisibleMemoryTypeIndexForGuest(
2002                     &mHostVisibleMemoryVirtInfo, i)) {
2003                 res |= (1 << i);
2004             }
2005         }
2006         return res;
2007     }
2008 
transformNonExternalResourceMemoryRequirementsForGuest(VkMemoryRequirements * reqs)2009     void transformNonExternalResourceMemoryRequirementsForGuest(
2010         VkMemoryRequirements* reqs) {
2011         reqs->memoryTypeBits =
2012             transformNonExternalResourceMemoryTypeBitsForGuest(
2013                 reqs->memoryTypeBits);
2014     }
2015 
transformExternalResourceMemoryRequirementsForGuest(VkMemoryRequirements * reqs)2016     void transformExternalResourceMemoryRequirementsForGuest(
2017         VkMemoryRequirements* reqs) {
2018         reqs->memoryTypeBits =
2019             transformExternalResourceMemoryTypeBitsForGuest(
2020                 reqs->memoryTypeBits);
2021     }
2022 
transformExternalResourceMemoryDedicatedRequirementsForGuest(VkMemoryDedicatedRequirements * dedicatedReqs)2023     void transformExternalResourceMemoryDedicatedRequirementsForGuest(
2024         VkMemoryDedicatedRequirements* dedicatedReqs) {
2025         dedicatedReqs->prefersDedicatedAllocation = VK_TRUE;
2026         dedicatedReqs->requiresDedicatedAllocation = VK_TRUE;
2027     }
2028 
transformImageMemoryRequirementsForGuest(VkImage image,VkMemoryRequirements * reqs)2029     void transformImageMemoryRequirementsForGuest(
2030         VkImage image,
2031         VkMemoryRequirements* reqs) {
2032 
2033         AutoLock lock(mLock);
2034 
2035         auto it = info_VkImage.find(image);
2036         if (it == info_VkImage.end()) return;
2037 
2038         auto& info = it->second;
2039 
2040         if (!info.external ||
2041             !info.externalCreateInfo.handleTypes) {
2042             transformNonExternalResourceMemoryRequirementsForGuest(reqs);
2043             return;
2044         }
2045 
2046         transformExternalResourceMemoryRequirementsForGuest(reqs);
2047     }
2048 
transformBufferMemoryRequirementsForGuest(VkBuffer buffer,VkMemoryRequirements * reqs)2049     void transformBufferMemoryRequirementsForGuest(
2050         VkBuffer buffer,
2051         VkMemoryRequirements* reqs) {
2052 
2053         AutoLock lock(mLock);
2054 
2055         auto it = info_VkBuffer.find(buffer);
2056         if (it == info_VkBuffer.end()) return;
2057 
2058         auto& info = it->second;
2059 
2060         if (!info.external ||
2061             !info.externalCreateInfo.handleTypes) {
2062             transformNonExternalResourceMemoryRequirementsForGuest(reqs);
2063             return;
2064         }
2065 
2066         transformExternalResourceMemoryRequirementsForGuest(reqs);
2067     }
2068 
transformImageMemoryRequirements2ForGuest(VkImage image,VkMemoryRequirements2 * reqs2)2069     void transformImageMemoryRequirements2ForGuest(
2070         VkImage image,
2071         VkMemoryRequirements2* reqs2) {
2072 
2073         AutoLock lock(mLock);
2074 
2075         auto it = info_VkImage.find(image);
2076         if (it == info_VkImage.end()) return;
2077 
2078         auto& info = it->second;
2079 
2080         if (!info.external ||
2081             !info.externalCreateInfo.handleTypes) {
2082             transformNonExternalResourceMemoryRequirementsForGuest(
2083                 &reqs2->memoryRequirements);
2084             return;
2085         }
2086 
2087         transformExternalResourceMemoryRequirementsForGuest(&reqs2->memoryRequirements);
2088 
2089         VkMemoryDedicatedRequirements* dedicatedReqs =
2090             vk_find_struct<VkMemoryDedicatedRequirements>(reqs2);
2091 
2092         if (!dedicatedReqs) return;
2093 
2094         transformExternalResourceMemoryDedicatedRequirementsForGuest(
2095             dedicatedReqs);
2096     }
2097 
transformBufferMemoryRequirements2ForGuest(VkBuffer buffer,VkMemoryRequirements2 * reqs2)2098     void transformBufferMemoryRequirements2ForGuest(
2099         VkBuffer buffer,
2100         VkMemoryRequirements2* reqs2) {
2101 
2102         AutoLock lock(mLock);
2103 
2104         auto it = info_VkBuffer.find(buffer);
2105         if (it == info_VkBuffer.end()) return;
2106 
2107         auto& info = it->second;
2108 
2109         if (!info.external ||
2110             !info.externalCreateInfo.handleTypes) {
2111             transformNonExternalResourceMemoryRequirementsForGuest(
2112                 &reqs2->memoryRequirements);
2113             return;
2114         }
2115 
2116         transformExternalResourceMemoryRequirementsForGuest(&reqs2->memoryRequirements);
2117 
2118         VkMemoryDedicatedRequirements* dedicatedReqs =
2119             vk_find_struct<VkMemoryDedicatedRequirements>(reqs2);
2120 
2121         if (!dedicatedReqs) return;
2122 
2123         transformExternalResourceMemoryDedicatedRequirementsForGuest(
2124             dedicatedReqs);
2125     }
2126 
on_vkCreateImage(void * context,VkResult,VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)2127     VkResult on_vkCreateImage(
2128         void* context, VkResult,
2129         VkDevice device, const VkImageCreateInfo *pCreateInfo,
2130         const VkAllocationCallbacks *pAllocator,
2131         VkImage *pImage) {
2132         VkEncoder* enc = (VkEncoder*)context;
2133 
2134         VkImageCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2135         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
2136         VkExternalMemoryImageCreateInfo localExtImgCi;
2137 
2138         const VkExternalMemoryImageCreateInfo* extImgCiPtr =
2139             vk_find_struct<VkExternalMemoryImageCreateInfo>(pCreateInfo);
2140         if (extImgCiPtr) {
2141             localExtImgCi = vk_make_orphan_copy(*extImgCiPtr);
2142             vk_append_struct(&structChainIter, &localExtImgCi);
2143         }
2144 
2145 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2146         VkNativeBufferANDROID localAnb;
2147         const VkNativeBufferANDROID* anbInfoPtr =
2148             vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
2149         if (anbInfoPtr) {
2150             localAnb = vk_make_orphan_copy(*anbInfoPtr);
2151             vk_append_struct(&structChainIter, &localAnb);
2152         }
2153 
2154         VkExternalFormatANDROID localExtFormatAndroid;
2155         const VkExternalFormatANDROID* extFormatAndroidPtr =
2156             vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
2157         if (extFormatAndroidPtr) {
2158             localExtFormatAndroid = vk_make_orphan_copy(*extFormatAndroidPtr);
2159 
2160             // Do not append VkExternalFormatANDROID to the chain;
2161             // instead, replace the format in localCreateInfo
2162             // with the corresponding Vulkan format.
2163             if (extFormatAndroidPtr->externalFormat) {
2164                 localCreateInfo.format =
2165                     vk_format_from_android(extFormatAndroidPtr->externalFormat);
2166                 if (localCreateInfo.format == VK_FORMAT_UNDEFINED)
2167                     return VK_ERROR_VALIDATION_FAILED_EXT;
2168             }
2169         }
2170 #endif
2171 
2172 #ifdef VK_USE_PLATFORM_FUCHSIA
2173         const VkBufferCollectionImageCreateInfoFUCHSIA* extBufferCollectionPtr =
2174             vk_find_struct<VkBufferCollectionImageCreateInfoFUCHSIA>(pCreateInfo);
2175         if (extBufferCollectionPtr) {
2176             auto collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(
2177                 extBufferCollectionPtr->collection);
2178             uint32_t index = extBufferCollectionPtr->index;
2179             zx_handle_t vmo_handle = ZX_HANDLE_INVALID;
2180 
2181             fuchsia::sysmem::BufferCollectionInfo_2 info;
2182             zx_status_t status2;
2183             zx_status_t status = (*collection)->WaitForBuffersAllocated(&status2, &info);
2184             if (status == ZX_OK && status2 == ZX_OK) {
2185                 if (index < info.buffer_count) {
2186                     vmo_handle = info.buffers[index].vmo.release();
2187                 }
2188             } else {
2189                 ALOGE("WaitForBuffersAllocated failed: %d %d", status, status2);
2190             }
2191 
2192             if (vmo_handle != ZX_HANDLE_INVALID) {
2193                 zx_status_t status2 = ZX_OK;
2194                 status = fuchsia_hardware_goldfish_control_DeviceCreateColorBuffer(
2195                     mControlDevice,
2196                     vmo_handle,
2197                     localCreateInfo.extent.width,
2198                     localCreateInfo.extent.height,
2199                     fuchsia_hardware_goldfish_control_FormatType_BGRA,
2200                     &status2);
2201                 if (status != ZX_OK || status2 != ZX_OK) {
2202                     ALOGE("CreateColorBuffer failed: %d:%d", status, status2);
2203                 }
2204             }
2205         }
2206 
2207         // Allow external memory for all color attachments on Fuchsia.
2208         if (localCreateInfo.usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
2209             if (!extImgCiPtr) {
2210                 localExtImgCi.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
2211                 localExtImgCi.pNext = nullptr;
2212                 localExtImgCi.handleTypes = ~0; // handle type just needs to be non-zero
2213                 extImgCiPtr = &localExtImgCi;   // no vk_append_struct required
2214             }
2215         }
2216 #endif
2217 
2218         VkResult res = enc->vkCreateImage(device, &localCreateInfo, pAllocator, pImage);
2219 
2220         if (res != VK_SUCCESS) return res;
2221 
2222         AutoLock lock(mLock);
2223 
2224         auto it = info_VkImage.find(*pImage);
2225         if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
2226 
2227         auto& info = it->second;
2228 
2229         info.device = device;
2230         info.createInfo = *pCreateInfo;
2231         info.createInfo.pNext = nullptr;
2232 
2233         if (!extImgCiPtr) return res;
2234 
2235         info.external = true;
2236         info.externalCreateInfo = *extImgCiPtr;
2237 
2238         return res;
2239     }
2240 
on_vkCreateSamplerYcbcrConversion(void * context,VkResult,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)2241     VkResult on_vkCreateSamplerYcbcrConversion(
2242         void* context, VkResult,
2243         VkDevice device,
2244         const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
2245         const VkAllocationCallbacks* pAllocator,
2246         VkSamplerYcbcrConversion* pYcbcrConversion) {
2247 
2248         VkSamplerYcbcrConversionCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2249 
2250 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2251         const VkExternalFormatANDROID* extFormatAndroidPtr =
2252             vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
2253         if (extFormatAndroidPtr) {
2254             if (extFormatAndroidPtr->externalFormat) {
2255                 localCreateInfo.format =
2256                     vk_format_from_android(extFormatAndroidPtr->externalFormat);
2257             }
2258         }
2259 #endif
2260 
2261         VkEncoder* enc = (VkEncoder*)context;
2262         return enc->vkCreateSamplerYcbcrConversion(
2263             device, &localCreateInfo, pAllocator, pYcbcrConversion);
2264     }
2265 
on_vkCreateSamplerYcbcrConversionKHR(void * context,VkResult,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)2266     VkResult on_vkCreateSamplerYcbcrConversionKHR(
2267         void* context, VkResult,
2268         VkDevice device,
2269         const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
2270         const VkAllocationCallbacks* pAllocator,
2271         VkSamplerYcbcrConversion* pYcbcrConversion) {
2272 
2273         VkSamplerYcbcrConversionCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2274 
2275 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2276         const VkExternalFormatANDROID* extFormatAndroidPtr =
2277             vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
2278         if (extFormatAndroidPtr) {
2279             if (extFormatAndroidPtr->externalFormat) {
2280                 localCreateInfo.format =
2281                     vk_format_from_android(extFormatAndroidPtr->externalFormat);
2282             }
2283         }
2284 #endif
2285 
2286         VkEncoder* enc = (VkEncoder*)context;
2287         return enc->vkCreateSamplerYcbcrConversionKHR(
2288             device, &localCreateInfo, pAllocator, pYcbcrConversion);
2289     }
2290 
on_vkDestroyImage(void * context,VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)2291     void on_vkDestroyImage(
2292         void* context,
2293         VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
2294         VkEncoder* enc = (VkEncoder*)context;
2295         enc->vkDestroyImage(device, image, pAllocator);
2296     }
2297 
on_vkGetImageMemoryRequirements(void * context,VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)2298     void on_vkGetImageMemoryRequirements(
2299         void *context, VkDevice device, VkImage image,
2300         VkMemoryRequirements *pMemoryRequirements) {
2301         VkEncoder* enc = (VkEncoder*)context;
2302         enc->vkGetImageMemoryRequirements(
2303             device, image, pMemoryRequirements);
2304         transformImageMemoryRequirementsForGuest(
2305             image, pMemoryRequirements);
2306     }
2307 
on_vkGetImageMemoryRequirements2(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2308     void on_vkGetImageMemoryRequirements2(
2309         void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2310         VkMemoryRequirements2 *pMemoryRequirements) {
2311         VkEncoder* enc = (VkEncoder*)context;
2312         enc->vkGetImageMemoryRequirements2(
2313             device, pInfo, pMemoryRequirements);
2314         transformImageMemoryRequirements2ForGuest(
2315             pInfo->image, pMemoryRequirements);
2316     }
2317 
on_vkGetImageMemoryRequirements2KHR(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2318     void on_vkGetImageMemoryRequirements2KHR(
2319         void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2320         VkMemoryRequirements2 *pMemoryRequirements) {
2321         VkEncoder* enc = (VkEncoder*)context;
2322         enc->vkGetImageMemoryRequirements2KHR(
2323             device, pInfo, pMemoryRequirements);
2324         transformImageMemoryRequirements2ForGuest(
2325             pInfo->image, pMemoryRequirements);
2326     }
2327 
on_vkBindImageMemory(void * context,VkResult,VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)2328     VkResult on_vkBindImageMemory(
2329         void* context, VkResult,
2330         VkDevice device, VkImage image, VkDeviceMemory memory,
2331         VkDeviceSize memoryOffset) {
2332         VkEncoder* enc = (VkEncoder*)context;
2333         return enc->vkBindImageMemory(device, image, memory, memoryOffset);
2334     }
2335 
on_vkBindImageMemory2(void * context,VkResult,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)2336     VkResult on_vkBindImageMemory2(
2337         void* context, VkResult,
2338         VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
2339         VkEncoder* enc = (VkEncoder*)context;
2340         return enc->vkBindImageMemory2(device, bindingCount, pBindInfos);
2341     }
2342 
on_vkBindImageMemory2KHR(void * context,VkResult,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)2343     VkResult on_vkBindImageMemory2KHR(
2344         void* context, VkResult,
2345         VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
2346         VkEncoder* enc = (VkEncoder*)context;
2347         return enc->vkBindImageMemory2KHR(device, bindingCount, pBindInfos);
2348     }
2349 
on_vkCreateBuffer(void * context,VkResult,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)2350     VkResult on_vkCreateBuffer(
2351         void* context, VkResult,
2352         VkDevice device, const VkBufferCreateInfo *pCreateInfo,
2353         const VkAllocationCallbacks *pAllocator,
2354         VkBuffer *pBuffer) {
2355         VkEncoder* enc = (VkEncoder*)context;
2356 
2357         VkResult res = enc->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
2358 
2359         if (res != VK_SUCCESS) return res;
2360 
2361         AutoLock lock(mLock);
2362 
2363         auto it = info_VkBuffer.find(*pBuffer);
2364         if (it == info_VkBuffer.end()) return VK_ERROR_INITIALIZATION_FAILED;
2365 
2366         auto& info = it->second;
2367 
2368         info.createInfo = *pCreateInfo;
2369         info.createInfo.pNext = nullptr;
2370 
2371         const VkExternalMemoryBufferCreateInfo* extBufCi =
2372             vk_find_struct<VkExternalMemoryBufferCreateInfo>(pCreateInfo);
2373 
2374         if (!extBufCi) return res;
2375 
2376         info.external = true;
2377         info.externalCreateInfo = *extBufCi;
2378 
2379         return res;
2380     }
2381 
on_vkDestroyBuffer(void * context,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)2382     void on_vkDestroyBuffer(
2383         void* context,
2384         VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
2385         VkEncoder* enc = (VkEncoder*)context;
2386         enc->vkDestroyBuffer(device, buffer, pAllocator);
2387     }
2388 
on_vkGetBufferMemoryRequirements(void * context,VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)2389     void on_vkGetBufferMemoryRequirements(
2390         void* context, VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
2391         VkEncoder* enc = (VkEncoder*)context;
2392         enc->vkGetBufferMemoryRequirements(
2393             device, buffer, pMemoryRequirements);
2394         transformBufferMemoryRequirementsForGuest(
2395             buffer, pMemoryRequirements);
2396     }
2397 
on_vkGetBufferMemoryRequirements2(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2398     void on_vkGetBufferMemoryRequirements2(
2399         void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
2400         VkMemoryRequirements2* pMemoryRequirements) {
2401         VkEncoder* enc = (VkEncoder*)context;
2402         enc->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
2403         transformBufferMemoryRequirements2ForGuest(
2404             pInfo->buffer, pMemoryRequirements);
2405     }
2406 
on_vkGetBufferMemoryRequirements2KHR(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2407     void on_vkGetBufferMemoryRequirements2KHR(
2408         void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
2409         VkMemoryRequirements2* pMemoryRequirements) {
2410         VkEncoder* enc = (VkEncoder*)context;
2411         enc->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
2412         transformBufferMemoryRequirements2ForGuest(
2413             pInfo->buffer, pMemoryRequirements);
2414     }
2415 
on_vkBindBufferMemory(void * context,VkResult,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)2416     VkResult on_vkBindBufferMemory(
2417         void *context, VkResult,
2418         VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
2419         VkEncoder *enc = (VkEncoder *)context;
2420         return enc->vkBindBufferMemory(
2421             device, buffer, memory, memoryOffset);
2422     }
2423 
on_vkBindBufferMemory2(void * context,VkResult,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)2424     VkResult on_vkBindBufferMemory2(
2425         void *context, VkResult,
2426         VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
2427         VkEncoder *enc = (VkEncoder *)context;
2428         return enc->vkBindBufferMemory2(
2429             device, bindInfoCount, pBindInfos);
2430     }
2431 
on_vkBindBufferMemory2KHR(void * context,VkResult,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)2432     VkResult on_vkBindBufferMemory2KHR(
2433         void *context, VkResult,
2434         VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
2435         VkEncoder *enc = (VkEncoder *)context;
2436         return enc->vkBindBufferMemory2KHR(
2437             device, bindInfoCount, pBindInfos);
2438     }
2439 
ensureSyncDeviceFd()2440     void ensureSyncDeviceFd() {
2441         if (mSyncDeviceFd >= 0) return;
2442 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2443         mSyncDeviceFd = goldfish_sync_open();
2444         if (mSyncDeviceFd >= 0) {
2445             ALOGD("%s: created sync device for current Vulkan process: %d\n", __func__, mSyncDeviceFd);
2446         } else {
2447             ALOGD("%s: failed to create sync device for current Vulkan process\n", __func__);
2448         }
2449 #endif
2450     }
2451 
on_vkCreateSemaphore(void * context,VkResult input_result,VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)2452     VkResult on_vkCreateSemaphore(
2453         void* context, VkResult input_result,
2454         VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
2455         const VkAllocationCallbacks* pAllocator,
2456         VkSemaphore* pSemaphore) {
2457 
2458         VkEncoder* enc = (VkEncoder*)context;
2459 
2460         VkSemaphoreCreateInfo finalCreateInfo = *pCreateInfo;
2461 
2462         const VkExportSemaphoreCreateInfoKHR* exportSemaphoreInfoPtr =
2463             vk_find_struct<VkExportSemaphoreCreateInfoKHR>(pCreateInfo);
2464 
2465 #ifdef VK_USE_PLATFORM_FUCHSIA
2466         bool exportEvent = exportSemaphoreInfoPtr &&
2467             (exportSemaphoreInfoPtr->handleTypes &
2468              VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA);
2469 
2470         if (exportEvent) {
2471             finalCreateInfo.pNext = nullptr;
2472         }
2473 #endif
2474 
2475 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2476         bool exportSyncFd = exportSemaphoreInfoPtr &&
2477             (exportSemaphoreInfoPtr->handleTypes &
2478              VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT);
2479 
2480         if (exportSyncFd) {
2481             finalCreateInfo.pNext = nullptr;
2482         }
2483 #endif
2484         input_result = enc->vkCreateSemaphore(
2485             device, &finalCreateInfo, pAllocator, pSemaphore);
2486 
2487         zx_handle_t event_handle = ZX_HANDLE_INVALID;
2488 
2489 #ifdef VK_USE_PLATFORM_FUCHSIA
2490         if (exportEvent) {
2491             zx_event_create(0, &event_handle);
2492         }
2493 #endif
2494 
2495         AutoLock lock(mLock);
2496 
2497         auto it = info_VkSemaphore.find(*pSemaphore);
2498         if (it == info_VkSemaphore.end()) return VK_ERROR_INITIALIZATION_FAILED;
2499 
2500         auto& info = it->second;
2501 
2502         info.device = device;
2503         info.eventHandle = event_handle;
2504 
2505 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2506         if (exportSyncFd) {
2507 
2508             ensureSyncDeviceFd();
2509 
2510             if (exportSyncFd) {
2511                 int syncFd = -1;
2512                 goldfish_sync_queue_work(
2513                     mSyncDeviceFd,
2514                     get_host_u64_VkSemaphore(*pSemaphore) /* the handle */,
2515                     GOLDFISH_SYNC_VULKAN_SEMAPHORE_SYNC /* thread handle (doubling as type field) */,
2516                     &syncFd);
2517                 info.syncFd = syncFd;
2518             }
2519         }
2520 #endif
2521 
2522         return VK_SUCCESS;
2523     }
2524 
on_vkDestroySemaphore(void * context,VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)2525     void on_vkDestroySemaphore(
2526         void* context,
2527         VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
2528         VkEncoder* enc = (VkEncoder*)context;
2529         enc->vkDestroySemaphore(device, semaphore, pAllocator);
2530     }
2531 
2532     // https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html#vkGetSemaphoreFdKHR
2533     // Each call to vkGetSemaphoreFdKHR must create a new file descriptor and transfer ownership
2534     // of it to the application. To avoid leaking resources, the application must release ownership
2535     // of the file descriptor when it is no longer needed.
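    // Illustrative application-side usage (hypothetical, not part of this
    // driver): the app receives a brand-new fd and must close it itself.
    //
    //   int fd = -1;
    //   VkSemaphoreGetFdInfoKHR getInfo = {
    //       VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr,
    //       semaphore, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT };
    //   vkGetSemaphoreFdKHR(device, &getInfo, &fd);
    //   // ... hand fd to a consumer or wait on it ...
    //   close(fd);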
on_vkGetSemaphoreFdKHR(void * context,VkResult,VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)2536     VkResult on_vkGetSemaphoreFdKHR(
2537         void* context, VkResult,
2538         VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
2539         int* pFd) {
2540 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2541         VkEncoder* enc = (VkEncoder*)context;
2542         bool getSyncFd =
2543             pGetFdInfo->handleType & VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
2544 
2545         if (getSyncFd) {
2546             AutoLock lock(mLock);
2547             auto it = info_VkSemaphore.find(pGetFdInfo->semaphore);
2548             if (it == info_VkSemaphore.end()) return VK_ERROR_OUT_OF_HOST_MEMORY;
2549             auto& semInfo = it->second;
2550             *pFd = dup(semInfo.syncFd);
2551             return VK_SUCCESS;
2552         } else {
2553             // opaque fd
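            // The host fd cannot be passed to the guest directly, so its
            // value is stashed inside a memfd; the matching
            // on_vkImportSemaphoreFdKHR path below reads it back out
            // before forwarding the import to the host.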
2554             int hostFd = 0;
2555             VkResult result = enc->vkGetSemaphoreFdKHR(device, pGetFdInfo, &hostFd);
2556             if (result != VK_SUCCESS) {
2557                 return result;
2558             }
2559             *pFd = memfd_create("vk_opaque_fd", 0);
2560             write(*pFd, &hostFd, sizeof(hostFd));
2561             return VK_SUCCESS;
2562         }
2563 #else
2564         (void)context;
2565         (void)device;
2566         (void)pGetFdInfo;
2567         (void)pFd;
2568         return VK_ERROR_INCOMPATIBLE_DRIVER;
2569 #endif
2570     }
2571 
on_vkImportSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)2572     VkResult on_vkImportSemaphoreFdKHR(
2573         void* context, VkResult input_result,
2574         VkDevice device,
2575         const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
2576 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2577         VkEncoder* enc = (VkEncoder*)context;
2578         if (input_result != VK_SUCCESS) {
2579             return input_result;
2580         }
2581 
2582         if (pImportSemaphoreFdInfo->handleType &
2583             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
2584             VkImportSemaphoreFdInfoKHR tmpInfo = *pImportSemaphoreFdInfo;
2585 
2586             AutoLock lock(mLock);
2587 
2588             auto semaphoreIt = info_VkSemaphore.find(pImportSemaphoreFdInfo->semaphore);
            if (semaphoreIt == info_VkSemaphore.end()) return VK_ERROR_OUT_OF_HOST_MEMORY;
2589             auto& info = semaphoreIt->second;
2590 
2591             if (info.syncFd >= 0) {
2592                 close(info.syncFd);
2593             }
2594 
2595             info.syncFd = pImportSemaphoreFdInfo->fd;
2596 
2597             return VK_SUCCESS;
2598         } else {
2599             int fd = pImportSemaphoreFdInfo->fd;
2600             int err = lseek(fd, 0, SEEK_SET);
2601             if (err == -1) {
2602                 ALOGE("lseek failed on import semaphore");
2603             }
2604             int hostFd = 0;
2605             read(fd, &hostFd, sizeof(hostFd));
2606             VkImportSemaphoreFdInfoKHR tmpInfo = *pImportSemaphoreFdInfo;
2607             tmpInfo.fd = hostFd;
2608             VkResult result = enc->vkImportSemaphoreFdKHR(device, &tmpInfo);
2609             close(fd);
2610             return result;
2611         }
2612 #else
2613         (void)context;
2614         (void)input_result;
2615         (void)device;
2616         (void)pImportSemaphoreFdInfo;
2617         return VK_ERROR_INCOMPATIBLE_DRIVER;
2618 #endif
2619     }
2620 
on_vkQueueSubmit(void * context,VkResult input_result,VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)2621     VkResult on_vkQueueSubmit(
2622         void* context, VkResult input_result,
2623         VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
2624 
2625         std::vector<VkSemaphore> pre_signal_semaphores;
2626         std::vector<zx_handle_t> post_wait_events;
2627         std::vector<int> post_wait_sync_fds;
2628         VkDevice device = VK_NULL_HANDLE;
2629         VkFence* pFence = nullptr;
2630 
2631         VkEncoder* enc = (VkEncoder*)context;
2632 
2633         AutoLock lock(mLock);
2634 
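        // Descriptive note on the two passes below: wait semaphores whose
        // payload lives guest-side (a zircon event or a sync fd) are waited
        // on here and re-signaled via a pre-submit, since the host never
        // sees their external payload; signal semaphores with external
        // payloads are collected so they can be signaled after the real
        // submit completes (using a fence wait further down).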
2635         for (uint32_t i = 0; i < submitCount; ++i) {
2636             for (uint32_t j = 0; j < pSubmits[i].waitSemaphoreCount; ++j) {
2637                 auto it = info_VkSemaphore.find(pSubmits[i].pWaitSemaphores[j]);
2638                 if (it != info_VkSemaphore.end()) {
2639                     auto& semInfo = it->second;
2640 #ifdef VK_USE_PLATFORM_FUCHSIA
2641                     if (semInfo.eventHandle) {
2642                         // Wait here instead of passing semaphore to host.
2643                         zx_object_wait_one(semInfo.eventHandle,
2644                                            ZX_EVENT_SIGNALED,
2645                                            ZX_TIME_INFINITE,
2646                                            nullptr);
2647                         pre_signal_semaphores.push_back(pSubmits[i].pWaitSemaphores[j]);
2648                     }
2649 #endif
2650 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2651                     if (semInfo.syncFd >= 0) {
2652                         // Wait here instead of passing semaphore to host.
2653                         sync_wait(semInfo.syncFd, 3000);
2654                         pre_signal_semaphores.push_back(pSubmits[i].pWaitSemaphores[j]);
2655                     }
2656 #endif
2657                 }
2658             }
2659             for (uint32_t j = 0; j < pSubmits[i].signalSemaphoreCount; ++j) {
2660                 auto it = info_VkSemaphore.find(pSubmits[i].pSignalSemaphores[j]);
2661                 if (it != info_VkSemaphore.end()) {
2662                     auto& semInfo = it->second;
2663 #ifdef VK_USE_PLATFORM_FUCHSIA
2664                     if (semInfo.eventHandle) {
2665                         post_wait_events.push_back(semInfo.eventHandle);
2666                         device = semInfo.device;
2667                         pFence = &info_VkDevice[device].fence;
2668                     }
2669 #endif
2670 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2671                     if (semInfo.syncFd >= 0) {
2672                         post_wait_sync_fds.push_back(semInfo.syncFd);
2673                         device = semInfo.device;
2674                         pFence = &info_VkDevice[device].fence;
2675                     }
2676 #endif
2677                 }
2678             }
2679         }
2680         lock.unlock();
2681 
2682         if (!pre_signal_semaphores.empty()) {
2683             VkSubmitInfo submit_info = {
2684                 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
2685                 .waitSemaphoreCount = 0,
2686                 .pWaitSemaphores = nullptr,
2687                 .pWaitDstStageMask = nullptr,
2688                 .signalSemaphoreCount = static_cast<uint32_t>(pre_signal_semaphores.size()),
2689                 .pSignalSemaphores = pre_signal_semaphores.data()};
2690             enc->vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
2691         }
2692 
2693         input_result = enc->vkQueueSubmit(queue, submitCount, pSubmits, fence);
2694 
2695         if (input_result != VK_SUCCESS) return input_result;
2696 
2697         if (post_wait_events.empty() && post_wait_sync_fds.empty())
2698             return VK_SUCCESS;
2699 
2700         if (*pFence == VK_NULL_HANDLE) {
2701             VkFenceCreateInfo fence_create_info = {
2702                 VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, 0,
2703             };
2704             enc->vkCreateFence(device, &fence_create_info, nullptr, pFence);
2705         }
2706         enc->vkQueueSubmit(queue, 0, nullptr, *pFence);
2707         static constexpr uint64_t MAX_WAIT_NS =
2708             5ULL * 1000ULL * 1000ULL * 1000ULL;
2709         enc->vkWaitForFences(device, 1, pFence, VK_TRUE, MAX_WAIT_NS);
2710         enc->vkResetFences(device, 1, pFence);
2711 
2712 #ifdef VK_USE_PLATFORM_FUCHSIA
2713         for (auto& event : post_wait_events) {
2714             zx_object_signal(event, 0, ZX_EVENT_SIGNALED);
2715         }
2716 #endif
2717 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2718         for (auto& fd : post_wait_sync_fds) {
2719             goldfish_sync_signal(fd);
2720         }
2721 #endif
2722 
2723         return VK_SUCCESS;
2724     }
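    // on_vkQueueSubmit splits one application submit into up to three: an
    // empty submit that signals semaphores already waited on the guest CPU,
    // the real submit, and a fence-only submit whose completion tells the
    // guest when to signal exported events / sync fds. A minimal sketch of
    // that pattern with the semaphore bookkeeping stripped out (illustrative
    // only, not compiled into the driver):
#if 0
    void exampleSplitSubmit(VkQueue queue,
                            uint32_t submitCount, const VkSubmitInfo* pSubmits,
                            VkFence appFence,
                            const std::vector<VkSemaphore>& preSignal,
                            VkDevice device, VkFence reusableFence) {
        if (!preSignal.empty()) {
            // 1. Signal semaphores whose payloads were already waited on the CPU.
            VkSubmitInfo preSubmit = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
            preSubmit.signalSemaphoreCount = (uint32_t)preSignal.size();
            preSubmit.pSignalSemaphores = preSignal.data();
            vkQueueSubmit(queue, 1, &preSubmit, VK_NULL_HANDLE);
        }
        // 2. The application's real work, with its original fence.
        vkQueueSubmit(queue, submitCount, pSubmits, appFence);
        // 3. Fence-only submit: once it signals, exported events / sync fds
        //    attached to the signal semaphores can be released guest-side.
        vkQueueSubmit(queue, 0, nullptr, reusableFence);
        vkWaitForFences(device, 1, &reusableFence, VK_TRUE, UINT64_MAX);
        vkResetFences(device, 1, &reusableFence);
    }
#endif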
2725 
2726     void unwrap_VkNativeBufferANDROID(
2727         const VkImageCreateInfo* pCreateInfo,
2728         VkImageCreateInfo* local_pCreateInfo) {
2729 
2730         if (!pCreateInfo->pNext) return;
2731 
2732         const VkNativeBufferANDROID* nativeInfo =
2733             reinterpret_cast<const VkNativeBufferANDROID*>(pCreateInfo->pNext);
2734 
2735         if (VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID != nativeInfo->sType) {
2736             return;
2737         }
2738 
2739         const cb_handle_t* cb_handle =
2740             reinterpret_cast<const cb_handle_t*>(nativeInfo->handle);
2741 
2742         if (!cb_handle) return;
2743 
2744         VkNativeBufferANDROID* nativeInfoOut =
2745             reinterpret_cast<VkNativeBufferANDROID*>(
2746                 const_cast<void*>(
2747                     local_pCreateInfo->pNext));
2748 
2749         if (!nativeInfoOut->handle) {
2750             ALOGE("FATAL: Local native buffer info not properly allocated!");
2751             abort();
2752         }
2753 
2754         *(uint32_t*)(nativeInfoOut->handle) = cb_handle->hostHandle;
2755     }
2756 
2757     void unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd, int*) {
2758         if (fd != -1) {
2759             sync_wait(fd, 3000);
2760         }
2761     }
2762 
2763     // Action of vkMapMemoryIntoAddressSpaceGOOGLE:
2764     // 1. Preprocess (on_vkMapMemoryIntoAddressSpaceGOOGLE_pre):
2765     //    uses the address space device to reserve a block of the
2766     //    right size.
2767     // 2. The reservation yields a physical address, which is
2768     //    written to |*pAddress|.
2769     // 3. After pre, the API call is encoded to the host, including
2770     //    the value of pAddress (the physical address).
2771     // 4. The host obtains the actual gpu pointer and sends it
2772     //    back out in |*pAddress|.
2773     // 5. Postprocess (on_vkMapMemoryIntoAddressSpaceGOOGLE) then runs,
2774     //    using the mmap() method of GoldfishAddressSpaceBlock to obtain
2775     //    a pointer in guest userspace corresponding to the host pointer.
2776     VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
2777         void*,
2778         VkResult,
2779         VkDevice,
2780         VkDeviceMemory memory,
2781         uint64_t* pAddress) {
2782 
2783         AutoLock lock(mLock);
2784 
2785         auto it = info_VkDeviceMemory.find(memory);
2786         if (it == info_VkDeviceMemory.end()) {
2787             return VK_ERROR_OUT_OF_HOST_MEMORY;
2788         }
2789 
2790         auto& memInfo = it->second;
2791         memInfo.goldfishAddressSpaceBlock =
2792             new GoldfishAddressSpaceBlock;
2793         auto& block = *(memInfo.goldfishAddressSpaceBlock);
2794 
2795         block.allocate(
2796             mGoldfishAddressSpaceBlockProvider.get(),
2797             memInfo.mappedSize);
2798 
2799         *pAddress = block.physAddr();
2800 
2801         return VK_SUCCESS;
2802     }
2803 
2804     VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(
2805         void*,
2806         VkResult input_result,
2807         VkDevice,
2808         VkDeviceMemory memory,
2809         uint64_t* pAddress) {
2810 
2811         if (input_result != VK_SUCCESS) {
2812             return input_result;
2813         }
2814 
2815         // Now pAddress points to the gpu addr from host.
2816         AutoLock lock(mLock);
2817 
2818         auto it = info_VkDeviceMemory.find(memory);
2819         if (it == info_VkDeviceMemory.end()) {
2820             return VK_ERROR_OUT_OF_HOST_MEMORY;
2821         }
2822 
2823         auto& memInfo = it->second;
2824         auto& block = *(memInfo.goldfishAddressSpaceBlock);
2825 
2826         uint64_t gpuAddr = *pAddress;
2827 
2828         void* userPtr = block.mmap(gpuAddr);
2829 
2830         D("%s: Got new host visible alloc. "
2831           "Sizeof void: %zu map size: %zu Range: [%p %p]",
2832           __func__,
2833           sizeof(void*), (size_t)memInfo.mappedSize,
2834           userPtr,
2835           (unsigned char*)userPtr + memInfo.mappedSize);
2836 
2837         *pAddress = (uint64_t)(uintptr_t)userPtr;
2838 
2839         return input_result;
2840     }
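    // The _pre/post pair above implements steps 1-5 of the comment preceding
    // it. A minimal sketch of the whole round trip as the dispatch layer sees
    // it (illustrative only; the encode step in the middle is elided):
#if 0
    uint64_t exampleMapIntoAddressSpace(VkDevice device, VkDeviceMemory memory) {
        uint64_t addr = 0;
        // pre: reserve an address-space block; addr becomes its physical address.
        on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(nullptr, VK_SUCCESS, device, memory, &addr);
        // ... encode to host: the host maps its memory there and overwrites addr ...
        // post: mmap the block into guest userspace; addr becomes a usable pointer.
        on_vkMapMemoryIntoAddressSpaceGOOGLE(nullptr, VK_SUCCESS, device, memory, &addr);
        return addr;
    }
#endif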
2841 
2842     bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
2843         return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
2844                (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
2845                (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
2846                (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
2847                (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2848     }
2849 
2850     bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
2851         return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
2852                (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
2853                (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
2854                (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
2855     }
2856 
2857     bool isDescriptorTypeBufferView(VkDescriptorType descType) {
2858         return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
2859                (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
2860     }
2861 
2862     VkResult initDescriptorUpdateTemplateBuffers(
2863         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
2864         VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
2865 
2866         AutoLock lock(mLock);
2867 
2868         auto it = info_VkDescriptorUpdateTemplate.find(descriptorUpdateTemplate);
2869         if (it == info_VkDescriptorUpdateTemplate.end()) {
2870             return VK_ERROR_INITIALIZATION_FAILED;
2871         }
2872 
2873         auto& info = it->second;
2874 
2875         size_t imageInfosNeeded = 0;
2876         size_t bufferInfosNeeded = 0;
2877         size_t bufferViewsNeeded = 0;
2878 
2879         for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
2880             const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
2881             uint32_t descCount = entry.descriptorCount;
2882             VkDescriptorType descType = entry.descriptorType;
2883 
2884             info.templateEntries.push_back(entry);
2885 
2886             for (uint32_t j = 0; j < descCount; ++j) {
2887                 if (isDescriptorTypeImageInfo(descType)) {
2888                     ++imageInfosNeeded;
2889                     info.imageInfoEntryIndices.push_back(i);
2890                 } else if (isDescriptorTypeBufferInfo(descType)) {
2891                     ++bufferInfosNeeded;
2892                     info.bufferInfoEntryIndices.push_back(i);
2893                 } else if (isDescriptorTypeBufferView(descType)) {
2894                     ++bufferViewsNeeded;
2895                     info.bufferViewEntryIndices.push_back(i);
2896                 } else {
2897                     ALOGE("%s: FATAL: Unknown descriptor type %d\n", __func__, descType);
2898                     abort();
2899                 }
2900             }
2901         }
2902 
2903         // To be filled in later (our flat structure)
2904         info.imageInfos.resize(imageInfosNeeded);
2905         info.bufferInfos.resize(bufferInfosNeeded);
2906         info.bufferViews.resize(bufferViewsNeeded);
2907 
2908         return VK_SUCCESS;
2909     }
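    // A small worked example of the sizing pass above (illustrative only, not
    // compiled into the driver): two template entries and the flat arrays they
    // produce.
#if 0
    void exampleTemplateSizing() {
        VkDescriptorUpdateTemplateEntry entries[2] = {};
        entries[0].descriptorCount = 2;
        entries[0].descriptorType  = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        entries[1].descriptorCount = 1;
        entries[1].descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        // After initDescriptorUpdateTemplateBuffers for these entries:
        //   imageInfoEntryIndices  == {0, 0}, imageInfos.size()  == 2
        //   bufferInfoEntryIndices == {1},    bufferInfos.size() == 1
        //   bufferViewEntryIndices == {},     bufferViews.size() == 0
    }
#endif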
2910 
2911     VkResult on_vkCreateDescriptorUpdateTemplate(
2912         void* context, VkResult input_result,
2913         VkDevice device,
2914         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
2915         const VkAllocationCallbacks* pAllocator,
2916         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
2917 
2918         (void)context;
2919         (void)device;
2920         (void)pAllocator;
2921 
2922         if (input_result != VK_SUCCESS) return input_result;
2923 
2924         return initDescriptorUpdateTemplateBuffers(pCreateInfo, *pDescriptorUpdateTemplate);
2925     }
2926 
2927     VkResult on_vkCreateDescriptorUpdateTemplateKHR(
2928         void* context, VkResult input_result,
2929         VkDevice device,
2930         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
2931         const VkAllocationCallbacks* pAllocator,
2932         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
2933 
2934         (void)context;
2935         (void)device;
2936         (void)pAllocator;
2937 
2938         if (input_result != VK_SUCCESS) return input_result;
2939 
2940         return initDescriptorUpdateTemplateBuffers(pCreateInfo, *pDescriptorUpdateTemplate);
2941     }
2942 
2943     void on_vkUpdateDescriptorSetWithTemplate(
2944         void* context,
2945         VkDevice device,
2946         VkDescriptorSet descriptorSet,
2947         VkDescriptorUpdateTemplate descriptorUpdateTemplate,
2948         const void* pData) {
2949 
2950         VkEncoder* enc = (VkEncoder*)context;
2951 
2952         uint8_t* userBuffer = (uint8_t*)pData;
2953         if (!userBuffer) return;
2954 
2955         AutoLock lock(mLock);
2956 
2957         auto it = info_VkDescriptorUpdateTemplate.find(descriptorUpdateTemplate);
2958         if (it == info_VkDescriptorUpdateTemplate.end()) {
2959             return;
2960         }
2961 
2962         auto& info = it->second;
2963 
2964         size_t currImageInfoOffset = 0;
2965         size_t currBufferInfoOffset = 0;
2966         size_t currBufferViewOffset = 0;
2967 
2968         for (const auto& entry : info.templateEntries) {
2969             VkDescriptorType descType = entry.descriptorType;
2970 
2971             auto offset = entry.offset;
2972             auto stride = entry.stride;
2973 
2974             uint32_t descCount = entry.descriptorCount;
2975 
2976             if (isDescriptorTypeImageInfo(descType)) {
2977                 if (!stride) stride = sizeof(VkDescriptorImageInfo);
2978                 for (uint32_t j = 0; j < descCount; ++j) {
2979                     memcpy(((uint8_t*)info.imageInfos.data()) + currImageInfoOffset,
2980                            userBuffer + offset + j * stride,
2981                            sizeof(VkDescriptorImageInfo));
2982                     currImageInfoOffset += sizeof(VkDescriptorImageInfo);
2983                 }
2984             } else if (isDescriptorTypeBufferInfo(descType)) {
2985                 if (!stride) stride = sizeof(VkDescriptorBufferInfo);
2986                 for (uint32_t j = 0; j < descCount; ++j) {
2987                     memcpy(((uint8_t*)info.bufferInfos.data()) + currBufferInfoOffset,
2988                            userBuffer + offset + j * stride,
2989                            sizeof(VkDescriptorBufferInfo));
2990                     currBufferInfoOffset += sizeof(VkDescriptorBufferInfo);
2991                 }
2992             } else if (isDescriptorTypeBufferView(descType)) {
2993                 if (!stride) stride = sizeof(VkBufferView);
2994                 for (uint32_t j = 0; j < descCount; ++j) {
2995                     memcpy(((uint8_t*)info.bufferViews.data()) + currBufferViewOffset,
2996                            userBuffer + offset + j * stride,
2997                            sizeof(VkBufferView));
2998                     currBufferViewOffset += sizeof(VkBufferView);
2999                 }
3000             } else {
3001                 ALOGE("%s: FATAL: Unknown descriptor type %d\n", __func__, descType);
3002                 abort();
3003             }
3004         }
3005 
3006         enc->vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
3007             device,
3008             descriptorSet,
3009             descriptorUpdateTemplate,
3010             (uint32_t)info.imageInfos.size(),
3011             (uint32_t)info.bufferInfos.size(),
3012             (uint32_t)info.bufferViews.size(),
3013             info.imageInfoEntryIndices.data(),
3014             info.bufferInfoEntryIndices.data(),
3015             info.bufferViewEntryIndices.data(),
3016             info.imageInfos.data(),
3017             info.bufferInfos.data(),
3018             info.bufferViews.data());
3019     }
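    // The walk above reads each descriptor payload at
    // userBuffer + entry.offset + j * entry.stride. A minimal sketch of the
    // matching application-side pData layout (illustrative only), for a
    // template with one entry of two combined image samplers at offset 0 and
    // default stride:
#if 0
    void exampleUpdateWithTemplate(VkDevice device,
                                   VkDescriptorSet set,
                                   VkDescriptorUpdateTemplate templ,
                                   VkSampler sampler, VkImageView view) {
        VkDescriptorImageInfo payload[2] = {
            { sampler, view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL },
            { sampler, view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL },
        };
        // The loop above copies payload[0..1] into info.imageInfos before
        // encoding vkUpdateDescriptorSetWithTemplateSizedGOOGLE to the host.
        vkUpdateDescriptorSetWithTemplate(device, set, templ, payload);
    }
#endif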
3020 
3021     VkResult on_vkGetPhysicalDeviceImageFormatProperties2_common(
3022         bool isKhr,
3023         void* context, VkResult input_result,
3024         VkPhysicalDevice physicalDevice,
3025         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
3026         VkImageFormatProperties2* pImageFormatProperties) {
3027 
3028         VkEncoder* enc = (VkEncoder*)context;
3029         (void)input_result;
3030 
3031         VkAndroidHardwareBufferUsageANDROID* output_ahw_usage =
3032             vk_find_struct<VkAndroidHardwareBufferUsageANDROID>(pImageFormatProperties);
3033 
3034         VkResult hostRes;
3035 
3036         if (isKhr) {
3037             hostRes = enc->vkGetPhysicalDeviceImageFormatProperties2KHR(
3038                 physicalDevice, pImageFormatInfo,
3039                 pImageFormatProperties);
3040         } else {
3041             hostRes = enc->vkGetPhysicalDeviceImageFormatProperties2(
3042                 physicalDevice, pImageFormatInfo,
3043                 pImageFormatProperties);
3044         }
3045 
3046         if (hostRes != VK_SUCCESS) return hostRes;
3047 
3048         if (output_ahw_usage) {
3049             output_ahw_usage->androidHardwareBufferUsage =
3050                 getAndroidHardwareBufferUsageFromVkUsage(
3051                     pImageFormatInfo->flags,
3052                     pImageFormatInfo->usage);
3053         }
3054 
3055         return hostRes;
3056     }
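    // The helper above only fills VkAndroidHardwareBufferUsageANDROID when the
    // caller chains it into the output structure. A minimal application-side
    // sketch of that query (illustrative only; error handling elided):
#if 0
    void exampleQueryAhbUsage(VkPhysicalDevice physicalDevice) {
        VkPhysicalDeviceImageFormatInfo2 formatInfo = {
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, nullptr,
            VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
            VK_IMAGE_USAGE_SAMPLED_BIT, 0 };
        VkAndroidHardwareBufferUsageANDROID ahbUsage = {
            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, nullptr, 0 };
        VkImageFormatProperties2 props = {
            VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, &ahbUsage, {} };
        vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &formatInfo, &props);
        // On success, ahbUsage.androidHardwareBufferUsage holds the
        // AHARDWAREBUFFER_USAGE_* bits derived from flags/usage.
    }
#endif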
3057 
3058     VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
3059         void* context, VkResult input_result,
3060         VkPhysicalDevice physicalDevice,
3061         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
3062         VkImageFormatProperties2* pImageFormatProperties) {
3063         return on_vkGetPhysicalDeviceImageFormatProperties2_common(
3064             false /* not KHR */, context, input_result,
3065             physicalDevice, pImageFormatInfo, pImageFormatProperties);
3066     }
3067 
3068     VkResult on_vkGetPhysicalDeviceImageFormatProperties2KHR(
3069         void* context, VkResult input_result,
3070         VkPhysicalDevice physicalDevice,
3071         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
3072         VkImageFormatProperties2* pImageFormatProperties) {
3073         return on_vkGetPhysicalDeviceImageFormatProperties2_common(
3074             true /* is KHR */, context, input_result,
3075             physicalDevice, pImageFormatInfo, pImageFormatProperties);
3076     }
3077 
3078     VkResult on_vkBeginCommandBuffer(
3079         void* context, VkResult input_result,
3080         VkCommandBuffer commandBuffer,
3081         const VkCommandBufferBeginInfo* pBeginInfo) {
3082 
3083         VkEncoder* enc = (VkEncoder*)context;
3084         (void)input_result;
3085 
3086         if (!supportsDeferredCommands()) {
3087             return enc->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
3088         }
3089 
3090         enc->vkBeginCommandBufferAsyncGOOGLE(commandBuffer, pBeginInfo);
3091         return VK_SUCCESS;
3092     }
3093 
3094     VkResult on_vkEndCommandBuffer(
3095         void* context, VkResult input_result,
3096         VkCommandBuffer commandBuffer) {
3097 
3098         VkEncoder* enc = (VkEncoder*)context;
3099         (void)input_result;
3100 
3101         if (!supportsDeferredCommands()) {
3102             return enc->vkEndCommandBuffer(commandBuffer);
3103         }
3104 
3105         enc->vkEndCommandBufferAsyncGOOGLE(commandBuffer);
3106         return VK_SUCCESS;
3107     }
3108 
3109     VkResult on_vkResetCommandBuffer(
3110         void* context, VkResult input_result,
3111         VkCommandBuffer commandBuffer,
3112         VkCommandBufferResetFlags flags) {
3113 
3114         VkEncoder* enc = (VkEncoder*)context;
3115         (void)input_result;
3116 
3117         if (!supportsDeferredCommands()) {
3118             return enc->vkResetCommandBuffer(commandBuffer, flags);
3119         }
3120 
3121         enc->vkResetCommandBufferAsyncGOOGLE(commandBuffer, flags);
3122         return VK_SUCCESS;
3123     }
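    // vkBegin/vkEnd/vkResetCommandBuffer above all follow the same deferral
    // pattern: when the host supports deferred commands, encode the async
    // *GOOGLE variant and report VK_SUCCESS without waiting for a host round
    // trip. A minimal sketch of that shape (illustrative only; encodeSync and
    // encodeAsync are hypothetical stand-ins for the paired encoder calls):
#if 0
    template <typename SyncCall, typename AsyncCall>
    VkResult exampleMaybeDefer(bool supportsDeferred,
                               SyncCall encodeSync, AsyncCall encodeAsync) {
        if (!supportsDeferred) {
            return encodeSync();   // blocks on the host's VkResult
        }
        encodeAsync();             // fire-and-forget; host applies it in order
        return VK_SUCCESS;         // optimistically succeed in the guest
    }
#endif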
3124 
3125     VkResult on_vkCreateImageView(
3126         void* context, VkResult input_result,
3127         VkDevice device,
3128         const VkImageViewCreateInfo* pCreateInfo,
3129         const VkAllocationCallbacks* pAllocator,
3130         VkImageView* pView) {
3131 
3132         VkEncoder* enc = (VkEncoder*)context;
3133         (void)input_result;
3134 
3135         VkImageViewCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
3136 
3137 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3138         const VkExternalFormatANDROID* extFormatAndroidPtr =
3139             vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
3140         if (extFormatAndroidPtr) {
3141             if (extFormatAndroidPtr->externalFormat) {
3142                 localCreateInfo.format =
3143                     vk_format_from_android(extFormatAndroidPtr->externalFormat);
3144             }
3145         }
3146 #endif
3147 
3148         return enc->vkCreateImageView(device, &localCreateInfo, pAllocator, pView);
3149     }
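    // When the create info carries a nonzero VkExternalFormatANDROID, the view
    // format is replaced with the matching concrete VkFormat before encoding.
    // A minimal sketch of a create-info chain this path expects (illustrative
    // only; error handling elided):
#if 0
    void exampleExternalFormatView(VkDevice device, VkImage image,
                                   uint64_t externalFormat) {
        VkExternalFormatANDROID extFormat = {
            VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, nullptr, externalFormat };
        VkImageViewCreateInfo viewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
        viewInfo.pNext = &extFormat;
        viewInfo.image = image;
        viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
        viewInfo.format = VK_FORMAT_UNDEFINED; // resolved from externalFormat above
        viewInfo.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
        VkImageView view = VK_NULL_HANDLE;
        vkCreateImageView(device, &viewInfo, nullptr, &view);
    }
#endif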
3150 
3151     uint32_t getApiVersionFromInstance(VkInstance instance) const {
3152         AutoLock lock(mLock);
3153         uint32_t api = kMinApiVersion;
3154 
3155         auto it = info_VkInstance.find(instance);
3156         if (it == info_VkInstance.end()) return api;
3157 
3158         api = it->second.highestApiVersion;
3159 
3160         return api;
3161     }
3162 
3163     uint32_t getApiVersionFromDevice(VkDevice device) const {
3164         AutoLock lock(mLock);
3165 
3166         uint32_t api = kMinApiVersion;
3167 
3168         auto it = info_VkDevice.find(device);
3169         if (it == info_VkDevice.end()) return api;
3170 
3171         api = it->second.apiVersion;
3172 
3173         return api;
3174     }
3175 
3176     bool hasInstanceExtension(VkInstance instance, const std::string& name) const {
3177         AutoLock lock(mLock);
3178 
3179         auto it = info_VkInstance.find(instance);
3180         if (it == info_VkInstance.end()) return false;
3181 
3182         return it->second.enabledExtensions.find(name) !=
3183                it->second.enabledExtensions.end();
3184     }
3185 
3186     bool hasDeviceExtension(VkDevice device, const std::string& name) const {
3187         AutoLock lock(mLock);
3188 
3189         auto it = info_VkDevice.find(device);
3190         if (it == info_VkDevice.end()) return false;
3191 
3192         return it->second.enabledExtensions.find(name) !=
3193                it->second.enabledExtensions.end();
3194     }
3195 
3196 private:
3197     mutable Lock mLock;
3198     HostVisibleMemoryVirtualizationInfo mHostVisibleMemoryVirtInfo;
3199     std::unique_ptr<EmulatorFeatureInfo> mFeatureInfo;
3200     std::unique_ptr<GoldfishAddressSpaceBlockProvider> mGoldfishAddressSpaceBlockProvider;
3201 
3202     std::vector<VkExtensionProperties> mHostInstanceExtensions;
3203     std::vector<VkExtensionProperties> mHostDeviceExtensions;
3204 
3205     int mSyncDeviceFd = -1;
3206 
3207 #ifdef VK_USE_PLATFORM_FUCHSIA
3208     zx_handle_t mControlDevice = ZX_HANDLE_INVALID;
3209     fuchsia::sysmem::AllocatorSyncPtr mSysmemAllocator;
3210 #endif
3211 };
3212 
3213 ResourceTracker::ResourceTracker() : mImpl(new ResourceTracker::Impl()) { }
3214 ResourceTracker::~ResourceTracker() { }
3215 VulkanHandleMapping* ResourceTracker::createMapping() {
3216     return &mImpl->createMapping;
3217 }
3218 VulkanHandleMapping* ResourceTracker::unwrapMapping() {
3219     return &mImpl->unwrapMapping;
3220 }
3221 VulkanHandleMapping* ResourceTracker::destroyMapping() {
3222     return &mImpl->destroyMapping;
3223 }
3224 VulkanHandleMapping* ResourceTracker::defaultMapping() {
3225     return &mImpl->defaultMapping;
3226 }
3227 static ResourceTracker* sTracker = nullptr;
3228 // static
3229 ResourceTracker* ResourceTracker::get() {
3230     if (!sTracker) {
3231         // To be initialized once on vulkan device open.
3232         sTracker = new ResourceTracker;
3233     }
3234     return sTracker;
3235 }
3236 
3237 #define HANDLE_REGISTER_IMPL(type) \
3238     void ResourceTracker::register_##type(type obj) { \
3239         mImpl->register_##type(obj); \
3240     } \
3241     void ResourceTracker::unregister_##type(type obj) { \
3242         mImpl->unregister_##type(obj); \
3243     } \
3244 
3245 GOLDFISH_VK_LIST_HANDLE_TYPES(HANDLE_REGISTER_IMPL)
3246 
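// For reference, HANDLE_REGISTER_IMPL expands to a register/unregister pair
// per handle type. A hand-expanded sketch for one type (illustrative only;
// the real definitions come from the macro invocation above):
#if 0
void ResourceTracker::register_VkImage(VkImage obj) {
    mImpl->register_VkImage(obj);
}
void ResourceTracker::unregister_VkImage(VkImage obj) {
    mImpl->unregister_VkImage(obj);
}
#endif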
3247 bool ResourceTracker::isMemoryTypeHostVisible(
3248     VkDevice device, uint32_t typeIndex) const {
3249     return mImpl->isMemoryTypeHostVisible(device, typeIndex);
3250 }
3251 
getMappedPointer(VkDeviceMemory memory)3252 uint8_t* ResourceTracker::getMappedPointer(VkDeviceMemory memory) {
3253     return mImpl->getMappedPointer(memory);
3254 }
3255 
getMappedSize(VkDeviceMemory memory)3256 VkDeviceSize ResourceTracker::getMappedSize(VkDeviceMemory memory) {
3257     return mImpl->getMappedSize(memory);
3258 }
3259 
getNonCoherentExtendedSize(VkDevice device,VkDeviceSize basicSize) const3260 VkDeviceSize ResourceTracker::getNonCoherentExtendedSize(VkDevice device, VkDeviceSize basicSize) const {
3261     return mImpl->getNonCoherentExtendedSize(device, basicSize);
3262 }
3263 
isValidMemoryRange(const VkMappedMemoryRange & range) const3264 bool ResourceTracker::isValidMemoryRange(const VkMappedMemoryRange& range) const {
3265     return mImpl->isValidMemoryRange(range);
3266 }
3267 
setupFeatures(const EmulatorFeatureInfo * features)3268 void ResourceTracker::setupFeatures(const EmulatorFeatureInfo* features) {
3269     mImpl->setupFeatures(features);
3270 }
3271 
hostSupportsVulkan() const3272 bool ResourceTracker::hostSupportsVulkan() const {
3273     return mImpl->hostSupportsVulkan();
3274 }
3275 
usingDirectMapping() const3276 bool ResourceTracker::usingDirectMapping() const {
3277     return mImpl->usingDirectMapping();
3278 }
3279 
getApiVersionFromInstance(VkInstance instance) const3280 uint32_t ResourceTracker::getApiVersionFromInstance(VkInstance instance) const {
3281     return mImpl->getApiVersionFromInstance(instance);
3282 }
3283 
getApiVersionFromDevice(VkDevice device) const3284 uint32_t ResourceTracker::getApiVersionFromDevice(VkDevice device) const {
3285     return mImpl->getApiVersionFromDevice(device);
3286 }
hasInstanceExtension(VkInstance instance,const std::string & name) const3287 bool ResourceTracker::hasInstanceExtension(VkInstance instance, const std::string &name) const {
3288     return mImpl->hasInstanceExtension(instance, name);
3289 }
hasDeviceExtension(VkDevice device,const std::string & name) const3290 bool ResourceTracker::hasDeviceExtension(VkDevice device, const std::string &name) const {
3291     return mImpl->hasDeviceExtension(device, name);
3292 }
3293 
on_vkEnumerateInstanceExtensionProperties(void * context,VkResult input_result,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)3294 VkResult ResourceTracker::on_vkEnumerateInstanceExtensionProperties(
3295     void* context,
3296     VkResult input_result,
3297     const char* pLayerName,
3298     uint32_t* pPropertyCount,
3299     VkExtensionProperties* pProperties) {
3300     return mImpl->on_vkEnumerateInstanceExtensionProperties(
3301         context, input_result, pLayerName, pPropertyCount, pProperties);
3302 }
3303 
on_vkEnumerateDeviceExtensionProperties(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)3304 VkResult ResourceTracker::on_vkEnumerateDeviceExtensionProperties(
3305     void* context,
3306     VkResult input_result,
3307     VkPhysicalDevice physicalDevice,
3308     const char* pLayerName,
3309     uint32_t* pPropertyCount,
3310     VkExtensionProperties* pProperties) {
3311     return mImpl->on_vkEnumerateDeviceExtensionProperties(
3312         context, input_result, physicalDevice, pLayerName, pPropertyCount, pProperties);
3313 }
3314 
on_vkEnumeratePhysicalDevices(void * context,VkResult input_result,VkInstance instance,uint32_t * pPhysicalDeviceCount,VkPhysicalDevice * pPhysicalDevices)3315 VkResult ResourceTracker::on_vkEnumeratePhysicalDevices(
3316     void* context, VkResult input_result,
3317     VkInstance instance, uint32_t* pPhysicalDeviceCount,
3318     VkPhysicalDevice* pPhysicalDevices) {
3319     return mImpl->on_vkEnumeratePhysicalDevices(
3320         context, input_result, instance, pPhysicalDeviceCount,
3321         pPhysicalDevices);
3322 }
3323 
on_vkGetPhysicalDeviceMemoryProperties(void * context,VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties * pMemoryProperties)3324 void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties(
3325     void* context,
3326     VkPhysicalDevice physicalDevice,
3327     VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
3328     mImpl->on_vkGetPhysicalDeviceMemoryProperties(
3329         context, physicalDevice, pMemoryProperties);
3330 }
3331 
on_vkGetPhysicalDeviceMemoryProperties2(void * context,VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)3332 void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties2(
3333     void* context,
3334     VkPhysicalDevice physicalDevice,
3335     VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
3336     mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
3337         context, physicalDevice, pMemoryProperties);
3338 }
3339 
on_vkGetPhysicalDeviceMemoryProperties2KHR(void * context,VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)3340 void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties2KHR(
3341     void* context,
3342     VkPhysicalDevice physicalDevice,
3343     VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
3344     mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
3345         context, physicalDevice, pMemoryProperties);
3346 }
3347 
on_vkCreateInstance(void * context,VkResult input_result,const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)3348 VkResult ResourceTracker::on_vkCreateInstance(
3349     void* context,
3350     VkResult input_result,
3351     const VkInstanceCreateInfo* pCreateInfo,
3352     const VkAllocationCallbacks* pAllocator,
3353     VkInstance* pInstance) {
3354     return mImpl->on_vkCreateInstance(
3355         context, input_result, pCreateInfo, pAllocator, pInstance);
3356 }
3357 
on_vkCreateDevice(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)3358 VkResult ResourceTracker::on_vkCreateDevice(
3359     void* context,
3360     VkResult input_result,
3361     VkPhysicalDevice physicalDevice,
3362     const VkDeviceCreateInfo* pCreateInfo,
3363     const VkAllocationCallbacks* pAllocator,
3364     VkDevice* pDevice) {
3365     return mImpl->on_vkCreateDevice(
3366         context, input_result, physicalDevice, pCreateInfo, pAllocator, pDevice);
3367 }
3368 
on_vkDestroyDevice_pre(void * context,VkDevice device,const VkAllocationCallbacks * pAllocator)3369 void ResourceTracker::on_vkDestroyDevice_pre(
3370     void* context,
3371     VkDevice device,
3372     const VkAllocationCallbacks* pAllocator) {
3373     mImpl->on_vkDestroyDevice_pre(context, device, pAllocator);
3374 }
3375 
on_vkAllocateMemory(void * context,VkResult input_result,VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)3376 VkResult ResourceTracker::on_vkAllocateMemory(
3377     void* context,
3378     VkResult input_result,
3379     VkDevice device,
3380     const VkMemoryAllocateInfo* pAllocateInfo,
3381     const VkAllocationCallbacks* pAllocator,
3382     VkDeviceMemory* pMemory) {
3383     return mImpl->on_vkAllocateMemory(
3384         context, input_result, device, pAllocateInfo, pAllocator, pMemory);
3385 }
3386 
on_vkFreeMemory(void * context,VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)3387 void ResourceTracker::on_vkFreeMemory(
3388     void* context,
3389     VkDevice device,
3390     VkDeviceMemory memory,
3391     const VkAllocationCallbacks* pAllocator) {
3392     return mImpl->on_vkFreeMemory(
3393         context, device, memory, pAllocator);
3394 }
3395 
on_vkMapMemory(void * context,VkResult input_result,VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)3396 VkResult ResourceTracker::on_vkMapMemory(
3397     void* context,
3398     VkResult input_result,
3399     VkDevice device,
3400     VkDeviceMemory memory,
3401     VkDeviceSize offset,
3402     VkDeviceSize size,
3403     VkMemoryMapFlags flags,
3404     void** ppData) {
3405     return mImpl->on_vkMapMemory(
3406         context, input_result, device, memory, offset, size, flags, ppData);
3407 }
3408 
on_vkUnmapMemory(void * context,VkDevice device,VkDeviceMemory memory)3409 void ResourceTracker::on_vkUnmapMemory(
3410     void* context,
3411     VkDevice device,
3412     VkDeviceMemory memory) {
3413     mImpl->on_vkUnmapMemory(context, device, memory);
3414 }
3415 
on_vkCreateImage(void * context,VkResult input_result,VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)3416 VkResult ResourceTracker::on_vkCreateImage(
3417     void* context, VkResult input_result,
3418     VkDevice device, const VkImageCreateInfo *pCreateInfo,
3419     const VkAllocationCallbacks *pAllocator,
3420     VkImage *pImage) {
3421     return mImpl->on_vkCreateImage(
3422         context, input_result,
3423         device, pCreateInfo, pAllocator, pImage);
3424 }
3425 
on_vkDestroyImage(void * context,VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)3426 void ResourceTracker::on_vkDestroyImage(
3427     void* context,
3428     VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
3429     mImpl->on_vkDestroyImage(context,
3430         device, image, pAllocator);
3431 }
3432 
on_vkGetImageMemoryRequirements(void * context,VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)3433 void ResourceTracker::on_vkGetImageMemoryRequirements(
3434     void *context, VkDevice device, VkImage image,
3435     VkMemoryRequirements *pMemoryRequirements) {
3436     mImpl->on_vkGetImageMemoryRequirements(
3437         context, device, image, pMemoryRequirements);
3438 }
3439 
on_vkGetImageMemoryRequirements2(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3440 void ResourceTracker::on_vkGetImageMemoryRequirements2(
3441     void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
3442     VkMemoryRequirements2 *pMemoryRequirements) {
3443     mImpl->on_vkGetImageMemoryRequirements2(
3444         context, device, pInfo, pMemoryRequirements);
3445 }
3446 
on_vkGetImageMemoryRequirements2KHR(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3447 void ResourceTracker::on_vkGetImageMemoryRequirements2KHR(
3448     void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
3449     VkMemoryRequirements2 *pMemoryRequirements) {
3450     mImpl->on_vkGetImageMemoryRequirements2KHR(
3451         context, device, pInfo, pMemoryRequirements);
3452 }
3453 
on_vkBindImageMemory(void * context,VkResult input_result,VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)3454 VkResult ResourceTracker::on_vkBindImageMemory(
3455     void* context, VkResult input_result,
3456     VkDevice device, VkImage image, VkDeviceMemory memory,
3457     VkDeviceSize memoryOffset) {
3458     return mImpl->on_vkBindImageMemory(
3459         context, input_result, device, image, memory, memoryOffset);
3460 }
3461 
on_vkBindImageMemory2(void * context,VkResult input_result,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)3462 VkResult ResourceTracker::on_vkBindImageMemory2(
3463     void* context, VkResult input_result,
3464     VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
3465     return mImpl->on_vkBindImageMemory2(
3466         context, input_result, device, bindingCount, pBindInfos);
3467 }
3468 
on_vkBindImageMemory2KHR(void * context,VkResult input_result,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)3469 VkResult ResourceTracker::on_vkBindImageMemory2KHR(
3470     void* context, VkResult input_result,
3471     VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
3472     return mImpl->on_vkBindImageMemory2KHR(
3473         context, input_result, device, bindingCount, pBindInfos);
3474 }
3475 
on_vkCreateBuffer(void * context,VkResult input_result,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)3476 VkResult ResourceTracker::on_vkCreateBuffer(
3477     void* context, VkResult input_result,
3478     VkDevice device, const VkBufferCreateInfo *pCreateInfo,
3479     const VkAllocationCallbacks *pAllocator,
3480     VkBuffer *pBuffer) {
3481     return mImpl->on_vkCreateBuffer(
3482         context, input_result,
3483         device, pCreateInfo, pAllocator, pBuffer);
3484 }
3485 
on_vkDestroyBuffer(void * context,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)3486 void ResourceTracker::on_vkDestroyBuffer(
3487     void* context,
3488     VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
3489     mImpl->on_vkDestroyBuffer(context, device, buffer, pAllocator);
3490 }
3491 
on_vkGetBufferMemoryRequirements(void * context,VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)3492 void ResourceTracker::on_vkGetBufferMemoryRequirements(
3493     void* context, VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
3494     mImpl->on_vkGetBufferMemoryRequirements(context, device, buffer, pMemoryRequirements);
3495 }
3496 
on_vkGetBufferMemoryRequirements2(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3497 void ResourceTracker::on_vkGetBufferMemoryRequirements2(
3498     void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
3499     VkMemoryRequirements2* pMemoryRequirements) {
3500     mImpl->on_vkGetBufferMemoryRequirements2(
3501         context, device, pInfo, pMemoryRequirements);
3502 }
3503 
on_vkGetBufferMemoryRequirements2KHR(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3504 void ResourceTracker::on_vkGetBufferMemoryRequirements2KHR(
3505     void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
3506     VkMemoryRequirements2* pMemoryRequirements) {
3507     mImpl->on_vkGetBufferMemoryRequirements2KHR(
3508         context, device, pInfo, pMemoryRequirements);
3509 }
3510 
on_vkBindBufferMemory(void * context,VkResult input_result,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)3511 VkResult ResourceTracker::on_vkBindBufferMemory(
3512     void* context, VkResult input_result,
3513     VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
3514     return mImpl->on_vkBindBufferMemory(
3515         context, input_result,
3516         device, buffer, memory, memoryOffset);
3517 }
3518 
on_vkBindBufferMemory2(void * context,VkResult input_result,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)3519 VkResult ResourceTracker::on_vkBindBufferMemory2(
3520     void* context, VkResult input_result,
3521     VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
3522     return mImpl->on_vkBindBufferMemory2(
3523         context, input_result,
3524         device, bindInfoCount, pBindInfos);
3525 }
3526 
on_vkBindBufferMemory2KHR(void * context,VkResult input_result,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)3527 VkResult ResourceTracker::on_vkBindBufferMemory2KHR(
3528     void* context, VkResult input_result,
3529     VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
3530     return mImpl->on_vkBindBufferMemory2KHR(
3531         context, input_result,
3532         device, bindInfoCount, pBindInfos);
3533 }
3534 
on_vkCreateSemaphore(void * context,VkResult input_result,VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)3535 VkResult ResourceTracker::on_vkCreateSemaphore(
3536     void* context, VkResult input_result,
3537     VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
3538     const VkAllocationCallbacks *pAllocator,
3539     VkSemaphore *pSemaphore) {
3540     return mImpl->on_vkCreateSemaphore(
3541         context, input_result,
3542         device, pCreateInfo, pAllocator, pSemaphore);
3543 }
3544 
on_vkDestroySemaphore(void * context,VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)3545 void ResourceTracker::on_vkDestroySemaphore(
3546     void* context,
3547     VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
3548     mImpl->on_vkDestroySemaphore(context, device, semaphore, pAllocator);
3549 }
3550 
on_vkQueueSubmit(void * context,VkResult input_result,VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)3551 VkResult ResourceTracker::on_vkQueueSubmit(
3552     void* context, VkResult input_result,
3553     VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
3554     return mImpl->on_vkQueueSubmit(
3555         context, input_result, queue, submitCount, pSubmits, fence);
3556 }
3557 
on_vkGetSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)3558 VkResult ResourceTracker::on_vkGetSemaphoreFdKHR(
3559     void* context, VkResult input_result,
3560     VkDevice device,
3561     const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
3562     int* pFd) {
3563     return mImpl->on_vkGetSemaphoreFdKHR(context, input_result, device, pGetFdInfo, pFd);
3564 }
3565 
on_vkImportSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)3566 VkResult ResourceTracker::on_vkImportSemaphoreFdKHR(
3567     void* context, VkResult input_result,
3568     VkDevice device,
3569     const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
3570     return mImpl->on_vkImportSemaphoreFdKHR(context, input_result, device, pImportSemaphoreFdInfo);
3571 }
3572 
unwrap_VkNativeBufferANDROID(const VkImageCreateInfo * pCreateInfo,VkImageCreateInfo * local_pCreateInfo)3573 void ResourceTracker::unwrap_VkNativeBufferANDROID(
3574     const VkImageCreateInfo* pCreateInfo,
3575     VkImageCreateInfo* local_pCreateInfo) {
3576     mImpl->unwrap_VkNativeBufferANDROID(pCreateInfo, local_pCreateInfo);
3577 }
3578 
unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd,int * fd_out)3579 void ResourceTracker::unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd, int* fd_out) {
3580     mImpl->unwrap_vkAcquireImageANDROID_nativeFenceFd(fd, fd_out);
3581 }
3582 
3583 #ifdef VK_USE_PLATFORM_FUCHSIA
on_vkGetMemoryZirconHandleFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkMemoryGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)3584 VkResult ResourceTracker::on_vkGetMemoryZirconHandleFUCHSIA(
3585     void* context, VkResult input_result,
3586     VkDevice device,
3587     const VkMemoryGetZirconHandleInfoFUCHSIA* pInfo,
3588     uint32_t* pHandle) {
3589     return mImpl->on_vkGetMemoryZirconHandleFUCHSIA(
3590         context, input_result, device, pInfo, pHandle);
3591 }
3592 
on_vkGetMemoryZirconHandlePropertiesFUCHSIA(void * context,VkResult input_result,VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,uint32_t handle,VkMemoryZirconHandlePropertiesFUCHSIA * pProperties)3593 VkResult ResourceTracker::on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
3594     void* context, VkResult input_result,
3595     VkDevice device,
3596     VkExternalMemoryHandleTypeFlagBits handleType,
3597     uint32_t handle,
3598     VkMemoryZirconHandlePropertiesFUCHSIA* pProperties) {
3599     return mImpl->on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
3600         context, input_result, device, handleType, handle, pProperties);
3601 }
3602 
on_vkGetSemaphoreZirconHandleFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkSemaphoreGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)3603 VkResult ResourceTracker::on_vkGetSemaphoreZirconHandleFUCHSIA(
3604     void* context, VkResult input_result,
3605     VkDevice device,
3606     const VkSemaphoreGetZirconHandleInfoFUCHSIA* pInfo,
3607     uint32_t* pHandle) {
3608     return mImpl->on_vkGetSemaphoreZirconHandleFUCHSIA(
3609         context, input_result, device, pInfo, pHandle);
3610 }
3611 
on_vkImportSemaphoreZirconHandleFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreZirconHandleInfoFUCHSIA * pInfo)3612 VkResult ResourceTracker::on_vkImportSemaphoreZirconHandleFUCHSIA(
3613     void* context, VkResult input_result,
3614     VkDevice device,
3615     const VkImportSemaphoreZirconHandleInfoFUCHSIA* pInfo) {
3616     return mImpl->on_vkImportSemaphoreZirconHandleFUCHSIA(
3617         context, input_result, device, pInfo);
3618 }
3619 
on_vkCreateBufferCollectionFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkBufferCollectionCreateInfoFUCHSIA * pInfo,const VkAllocationCallbacks * pAllocator,VkBufferCollectionFUCHSIA * pCollection)3620 VkResult ResourceTracker::on_vkCreateBufferCollectionFUCHSIA(
3621     void* context, VkResult input_result,
3622     VkDevice device,
3623     const VkBufferCollectionCreateInfoFUCHSIA* pInfo,
3624     const VkAllocationCallbacks* pAllocator,
3625     VkBufferCollectionFUCHSIA* pCollection) {
3626     return mImpl->on_vkCreateBufferCollectionFUCHSIA(
3627         context, input_result, device, pInfo, pAllocator, pCollection);
3628 }
3629 
on_vkDestroyBufferCollectionFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,const VkAllocationCallbacks * pAllocator)3630 void ResourceTracker::on_vkDestroyBufferCollectionFUCHSIA(
3631         void* context, VkResult input_result,
3632         VkDevice device,
3633         VkBufferCollectionFUCHSIA collection,
3634         const VkAllocationCallbacks* pAllocator) {
3635     return mImpl->on_vkDestroyBufferCollectionFUCHSIA(
3636         context, input_result, device, collection, pAllocator);
3637 }
3638 
on_vkSetBufferCollectionConstraintsFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,const VkImageCreateInfo * pImageInfo)3639 VkResult ResourceTracker::on_vkSetBufferCollectionConstraintsFUCHSIA(
3640         void* context, VkResult input_result,
3641         VkDevice device,
3642         VkBufferCollectionFUCHSIA collection,
3643         const VkImageCreateInfo* pImageInfo) {
3644     return mImpl->on_vkSetBufferCollectionConstraintsFUCHSIA(
3645         context, input_result, device, collection, pImageInfo);
3646 }
3647 
on_vkGetBufferCollectionPropertiesFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,VkBufferCollectionPropertiesFUCHSIA * pProperties)3648 VkResult ResourceTracker::on_vkGetBufferCollectionPropertiesFUCHSIA(
3649         void* context, VkResult input_result,
3650         VkDevice device,
3651         VkBufferCollectionFUCHSIA collection,
3652         VkBufferCollectionPropertiesFUCHSIA* pProperties) {
3653     return mImpl->on_vkGetBufferCollectionPropertiesFUCHSIA(
3654         context, input_result, device, collection, pProperties);
3655 }
3656 #endif
3657 
on_vkGetAndroidHardwareBufferPropertiesANDROID(void * context,VkResult input_result,VkDevice device,const AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties)3658 VkResult ResourceTracker::on_vkGetAndroidHardwareBufferPropertiesANDROID(
3659     void* context, VkResult input_result,
3660     VkDevice device,
3661     const AHardwareBuffer* buffer,
3662     VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
3663     return mImpl->on_vkGetAndroidHardwareBufferPropertiesANDROID(
3664         context, input_result, device, buffer, pProperties);
3665 }
on_vkGetMemoryAndroidHardwareBufferANDROID(void * context,VkResult input_result,VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)3666 VkResult ResourceTracker::on_vkGetMemoryAndroidHardwareBufferANDROID(
3667     void* context, VkResult input_result,
3668     VkDevice device,
3669     const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
3670     struct AHardwareBuffer** pBuffer) {
3671     return mImpl->on_vkGetMemoryAndroidHardwareBufferANDROID(
3672         context, input_result,
3673         device, pInfo, pBuffer);
3674 }
3675 
on_vkCreateSamplerYcbcrConversion(void * context,VkResult input_result,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)3676 VkResult ResourceTracker::on_vkCreateSamplerYcbcrConversion(
3677     void* context, VkResult input_result,
3678     VkDevice device,
3679     const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
3680     const VkAllocationCallbacks* pAllocator,
3681     VkSamplerYcbcrConversion* pYcbcrConversion) {
3682     return mImpl->on_vkCreateSamplerYcbcrConversion(
3683         context, input_result, device, pCreateInfo, pAllocator, pYcbcrConversion);
3684 }
3685 
on_vkCreateSamplerYcbcrConversionKHR(void * context,VkResult input_result,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)3686 VkResult ResourceTracker::on_vkCreateSamplerYcbcrConversionKHR(
3687     void* context, VkResult input_result,
3688     VkDevice device,
3689     const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
3690     const VkAllocationCallbacks* pAllocator,
3691     VkSamplerYcbcrConversion* pYcbcrConversion) {
3692     return mImpl->on_vkCreateSamplerYcbcrConversionKHR(
3693         context, input_result, device, pCreateInfo, pAllocator, pYcbcrConversion);
3694 }
3695 
on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(void * context,VkResult input_result,VkDevice device,VkDeviceMemory memory,uint64_t * pAddress)3696 VkResult ResourceTracker::on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
3697     void* context,
3698     VkResult input_result,
3699     VkDevice device,
3700     VkDeviceMemory memory,
3701     uint64_t* pAddress) {
3702     return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
3703         context, input_result, device, memory, pAddress);
3704 }
3705 
on_vkMapMemoryIntoAddressSpaceGOOGLE(void * context,VkResult input_result,VkDevice device,VkDeviceMemory memory,uint64_t * pAddress)3706 VkResult ResourceTracker::on_vkMapMemoryIntoAddressSpaceGOOGLE(
3707     void* context,
3708     VkResult input_result,
3709     VkDevice device,
3710     VkDeviceMemory memory,
3711     uint64_t* pAddress) {
3712     return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(
3713         context, input_result, device, memory, pAddress);
3714 }
3715 
on_vkCreateDescriptorUpdateTemplate(void * context,VkResult input_result,VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)3716 VkResult ResourceTracker::on_vkCreateDescriptorUpdateTemplate(
3717     void* context, VkResult input_result,
3718     VkDevice device,
3719     const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
3720     const VkAllocationCallbacks* pAllocator,
3721     VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
3722     return mImpl->on_vkCreateDescriptorUpdateTemplate(
3723         context, input_result,
3724         device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
3725 }
3726 
on_vkCreateDescriptorUpdateTemplateKHR(void * context,VkResult input_result,VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)3727 VkResult ResourceTracker::on_vkCreateDescriptorUpdateTemplateKHR(
3728     void* context, VkResult input_result,
3729     VkDevice device,
3730     const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
3731     const VkAllocationCallbacks* pAllocator,
3732     VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
3733     return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(
3734         context, input_result,
3735         device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
3736 }
3737 
on_vkUpdateDescriptorSetWithTemplate(void * context,VkDevice device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData)3738 void ResourceTracker::on_vkUpdateDescriptorSetWithTemplate(
3739     void* context,
3740     VkDevice device,
3741     VkDescriptorSet descriptorSet,
3742     VkDescriptorUpdateTemplate descriptorUpdateTemplate,
3743     const void* pData) {
3744     mImpl->on_vkUpdateDescriptorSetWithTemplate(
3745         context, device, descriptorSet,
3746         descriptorUpdateTemplate, pData);
3747 }
3748 
VkResult ResourceTracker::on_vkGetPhysicalDeviceImageFormatProperties2(
    void* context, VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
        context, input_result, physicalDevice, pImageFormatInfo,
        pImageFormatProperties);
}

VkResult ResourceTracker::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
    void* context, VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2KHR(
        context, input_result, physicalDevice, pImageFormatInfo,
        pImageFormatProperties);
}

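// Command buffer lifecycle (begin/end/reset) and image view creation wrappers;
// each simply delegates to mImpl.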
VkResult ResourceTracker::on_vkBeginCommandBuffer(
    void* context, VkResult input_result,
    VkCommandBuffer commandBuffer,
    const VkCommandBufferBeginInfo* pBeginInfo) {
    return mImpl->on_vkBeginCommandBuffer(
        context, input_result, commandBuffer, pBeginInfo);
}

VkResult ResourceTracker::on_vkEndCommandBuffer(
    void* context, VkResult input_result,
    VkCommandBuffer commandBuffer) {
    return mImpl->on_vkEndCommandBuffer(
        context, input_result, commandBuffer);
}

VkResult ResourceTracker::on_vkResetCommandBuffer(
    void* context, VkResult input_result,
    VkCommandBuffer commandBuffer,
    VkCommandBufferResetFlags flags) {
    return mImpl->on_vkResetCommandBuffer(
        context, input_result, commandBuffer, flags);
}

VkResult ResourceTracker::on_vkCreateImageView(
    void* context, VkResult input_result,
    VkDevice device,
    const VkImageViewCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkImageView* pView) {
    return mImpl->on_vkCreateImageView(
        context, input_result, device, pCreateInfo, pAllocator, pView);
}

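// Bulk guest<->host transforms for device memory parameters. Each pointer/count
// pair is a parallel array (memory handles, offsets, sizes, memory type indices,
// memory type bitmasks) that the pimpl translates for the given direction.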
void ResourceTracker::deviceMemoryTransform_tohost(
    VkDeviceMemory* memory, uint32_t memoryCount,
    VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount,
    uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    mImpl->deviceMemoryTransform_tohost(
        memory, memoryCount,
        offset, offsetCount,
        size, sizeCount,
        typeIndex, typeIndexCount,
        typeBits, typeBitsCount);
}

void ResourceTracker::deviceMemoryTransform_fromhost(
    VkDeviceMemory* memory, uint32_t memoryCount,
    VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount,
    uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    mImpl->deviceMemoryTransform_fromhost(
        memory, memoryCount,
        offset, offsetCount,
        size, sizeCount,
        typeIndex, typeIndexCount,
        typeBits, typeBitsCount);
}

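// Stub out the per-type transform hooks: every type in LIST_TRANSFORMED_TYPES
// gets empty tohost/fromhost implementations via this macro.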
#define DEFINE_TRANSFORMED_TYPE_IMPL(type) \
    void ResourceTracker::transformImpl_##type##_tohost(const type*, uint32_t) { } \
    void ResourceTracker::transformImpl_##type##_fromhost(const type*, uint32_t) { } \

LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)

} // namespace goldfish_vk