// Copyright (C) 2018 The Android Open Source Project
// Copyright (C) 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "ResourceTracker.h"

#include "android/base/threads/AndroidWorkPool.h"

#include "goldfish_vk_private_defs.h"

#include "../OpenglSystemCommon/EmulatorFeatureInfo.h"
#include "../OpenglSystemCommon/HostConnection.h"

#ifdef VK_USE_PLATFORM_ANDROID_KHR

#include "../egl/goldfish_sync.h"

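// Zircon handle types leak into code paths shared with Fuchsia, so provide
// no-op stand-ins when building for Android, where no Zircon kernel exists.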
typedef uint32_t zx_handle_t;
#define ZX_HANDLE_INVALID ((zx_handle_t)0)
void zx_handle_close(zx_handle_t) { }
void zx_event_create(int, zx_handle_t*) { }

#include "AndroidHardwareBuffer.h"

#ifndef HOST_BUILD
#include <drm/virtgpu_drm.h>
#include <xf86drm.h>
#endif

#include "VirtioGpuNext.h"

#endif // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_FUCHSIA

#include <cutils/native_handle.h>
#include <fuchsia/hardware/goldfish/cpp/fidl.h>
#include <fuchsia/sysmem/cpp/fidl.h>
#include <lib/zx/channel.h>
#include <lib/zx/vmo.h>
#include <zircon/process.h>
#include <zircon/syscalls.h>
#include <zircon/syscalls/object.h>

#include "services/service_connector.h"

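// Mirror image of the Zircon stubs above: AHardwareBuffer does not exist on
// Fuchsia, so stub out the AHB entry points that the shared code references.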
struct AHardwareBuffer;

void AHardwareBuffer_release(AHardwareBuffer*) { }

native_handle_t *AHardwareBuffer_getNativeHandle(AHardwareBuffer*) { return NULL; }

uint64_t getAndroidHardwareBufferUsageFromVkUsage(
    const VkImageCreateFlags vk_create,
    const VkImageUsageFlags vk_usage) {
    return AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
}

VkResult importAndroidHardwareBuffer(
    Gralloc *grallocHelper,
    const VkImportAndroidHardwareBufferInfoANDROID* info,
    struct AHardwareBuffer **importOut) {
    return VK_SUCCESS;
}

VkResult createAndroidHardwareBuffer(
    bool hasDedicatedImage,
    bool hasDedicatedBuffer,
    const VkExtent3D& imageExtent,
    uint32_t imageLayers,
    VkFormat imageFormat,
    VkImageUsageFlags imageUsage,
    VkImageCreateFlags imageCreateFlags,
    VkDeviceSize bufferSize,
    VkDeviceSize allocationInfoAllocSize,
    struct AHardwareBuffer **out) {
    return VK_SUCCESS;
}

namespace goldfish_vk {
struct HostVisibleMemoryVirtualizationInfo;
}

VkResult getAndroidHardwareBufferPropertiesANDROID(
    Gralloc *grallocHelper,
    const goldfish_vk::HostVisibleMemoryVirtualizationInfo*,
    VkDevice,
    const AHardwareBuffer*,
    VkAndroidHardwareBufferPropertiesANDROID*) { return VK_SUCCESS; }

VkResult getMemoryAndroidHardwareBufferANDROID(struct AHardwareBuffer **) { return VK_SUCCESS; }

#endif // VK_USE_PLATFORM_FUCHSIA

#include "HostVisibleMemoryVirtualization.h"
#include "Resources.h"
#include "VkEncoder.h"

#include "android/base/AlignedBuf.h"
#include "android/base/synchronization/AndroidLock.h"

#include "goldfish_address_space.h"
#include "goldfish_vk_private_defs.h"
#include "vk_format_info.h"
#include "vk_util.h"

#include <set>
#include <string>
#include <unordered_map>
#include <unordered_set>

#include <vndk/hardware_buffer.h>
#include <log/log.h>
#include <stdlib.h>
#include <sync/sync.h>

#ifdef VK_USE_PLATFORM_ANDROID_KHR

#include <sys/mman.h>
#include <sys/syscall.h>

#ifdef HOST_BUILD
#include "android/utils/tempfile.h"
#endif

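// memfd_create may not be exposed by the libc we build against (assumption:
// only the raw syscall is reliably available), so wrap the syscall directly.
// Host builds substitute a temp file, since they may not run on Linux at all.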
static inline int
inline_memfd_create(const char *name, unsigned int flags) {
#ifdef HOST_BUILD
    TempFile* tmpFile = tempfile_create();
    return open(tempfile_path(tmpFile), O_RDWR);
    // TODO: Windows is not supposed to support VkSemaphoreGetFdInfoKHR
#else
    return syscall(SYS_memfd_create, name, flags);
#endif
}
#define memfd_create inline_memfd_create
#endif // VK_USE_PLATFORM_ANDROID_KHR

#define RESOURCE_TRACKER_DEBUG 0

#if RESOURCE_TRACKER_DEBUG
#undef D
#define D(fmt,...) ALOGD("%s: " fmt, __func__, ##__VA_ARGS__);
#else
#ifndef D
#define D(fmt,...)
#endif
#endif

using android::aligned_buf_alloc;
using android::aligned_buf_free;
using android::base::guest::AutoLock;
using android::base::guest::Lock;
using android::base::guest::WorkPool;

namespace goldfish_vk {

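// The macros below generate the guest<->host handle-mapping boilerplate for
// every Vulkan handle type. As a rough sketch (not the literal expansion),
// CREATE_MAPPING_IMPL_FOR_TYPE(VkFence) yields overrides of the form:
//
//     void mapHandles_VkFence(VkFence* handles, size_t count) override {
//         for (size_t i = 0; i < count; ++i) {
//             handles[i] = new_from_host_VkFence(handles[i]);
//             ResourceTracker::get()->register_VkFence(handles[i]);
//         }
//     }
//
// plus _u64 variants for the wire encoding, where handles always travel as
// uint64_t regardless of the guest's pointer width.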
#define MAKE_HANDLE_MAPPING_FOREACH(type_name, map_impl, map_to_u64_impl, map_from_u64_impl) \
    void mapHandles_##type_name(type_name* handles, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_impl; \
        } \
    } \
    void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_to_u64_impl; \
        } \
    } \
    void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_from_u64_impl; \
        } \
    } \

#define DEFINE_RESOURCE_TRACKING_CLASS(class_name, impl) \
class class_name : public VulkanHandleMapping { \
public: \
    virtual ~class_name() { } \
    GOLDFISH_VK_LIST_HANDLE_TYPES(impl) \
}; \

#define CREATE_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        handles[i] = new_from_host_##type_name(handles[i]); ResourceTracker::get()->register_##type_name(handles[i]);, \
        handle_u64s[i] = (uint64_t)new_from_host_##type_name(handles[i]), \
        handles[i] = (type_name)new_from_host_u64_##type_name(handle_u64s[i]); ResourceTracker::get()->register_##type_name(handles[i]);)

#define UNWRAP_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        handles[i] = get_host_##type_name(handles[i]), \
        handle_u64s[i] = (uint64_t)get_host_u64_##type_name(handles[i]), \
        handles[i] = (type_name)get_host_##type_name((type_name)handle_u64s[i]))

#define DESTROY_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        ResourceTracker::get()->unregister_##type_name(handles[i]); delete_goldfish_##type_name(handles[i]), \
        (void)handle_u64s[i]; delete_goldfish_##type_name(handles[i]), \
        (void)handles[i]; delete_goldfish_##type_name((type_name)handle_u64s[i]))

DEFINE_RESOURCE_TRACKING_CLASS(CreateMapping, CREATE_MAPPING_IMPL_FOR_TYPE)
DEFINE_RESOURCE_TRACKING_CLASS(UnwrapMapping, UNWRAP_MAPPING_IMPL_FOR_TYPE)
DEFINE_RESOURCE_TRACKING_CLASS(DestroyMapping, DESTROY_MAPPING_IMPL_FOR_TYPE)

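// Central bookkeeping for all guest-side Vulkan objects. Each handle type
// gets an info_<Type> table (generated further down), guarded by a single
// lock (mLock).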
class ResourceTracker::Impl {
public:
    Impl() = default;
    CreateMapping createMapping;
    UnwrapMapping unwrapMapping;
    DestroyMapping destroyMapping;
    DefaultHandleMapping defaultMapping;

#define HANDLE_DEFINE_TRIVIAL_INFO_STRUCT(type) \
    struct type##_Info { \
        uint32_t unused; \
    }; \

    GOLDFISH_VK_LIST_TRIVIAL_HANDLE_TYPES(HANDLE_DEFINE_TRIVIAL_INFO_STRUCT)

    struct VkInstance_Info {
        uint32_t highestApiVersion;
        std::set<std::string> enabledExtensions;
        // Fodder for vkEnumeratePhysicalDevices.
        std::vector<VkPhysicalDevice> physicalDevices;
    };

    using HostMemBlocks = std::vector<HostMemAlloc>;
    using HostMemBlockIndex = size_t;

#define INVALID_HOST_MEM_BLOCK (-1)

    struct VkDevice_Info {
        VkPhysicalDevice physdev;
        VkPhysicalDeviceProperties props;
        VkPhysicalDeviceMemoryProperties memProps;
        std::vector<HostMemBlocks> hostMemBlocks { VK_MAX_MEMORY_TYPES };
        uint32_t apiVersion;
        std::set<std::string> enabledExtensions;
    };

    struct VirtioGpuHostmemResourceInfo {
        uint32_t resourceId = 0;
        int primeFd = -1;
    };

    struct VkDeviceMemory_Info {
        VkDeviceSize allocationSize = 0;
        VkDeviceSize mappedSize = 0;
        uint8_t* mappedPtr = nullptr;
        uint32_t memoryTypeIndex = 0;
        bool virtualHostVisibleBacking = false;
        bool directMapped = false;
        GoldfishAddressSpaceBlock* goldfishAddressSpaceBlock = nullptr;
        VirtioGpuHostmemResourceInfo resInfo;
        SubAlloc subAlloc;
        AHardwareBuffer* ahw = nullptr;
        zx_handle_t vmoHandle = ZX_HANDLE_INVALID;
    };

    struct VkCommandBuffer_Info {
        VkEncoder** lastUsedEncoderPtr = nullptr;
        uint32_t sequenceNumber = 0;
    };

    // custom guest-side structs for images/buffers because of AHardwareBuffer :((
    struct VkImage_Info {
        VkDevice device;
        VkImageCreateInfo createInfo;
        bool external = false;
        VkExternalMemoryImageCreateInfo externalCreateInfo;
        VkDeviceMemory currentBacking = VK_NULL_HANDLE;
        VkDeviceSize currentBackingOffset = 0;
        VkDeviceSize currentBackingSize = 0;
        bool baseRequirementsKnown = false;
        VkMemoryRequirements baseRequirements;
#ifdef VK_USE_PLATFORM_FUCHSIA
        bool isSysmemBackedMemory = false;
#endif
    };

    struct VkBuffer_Info {
        VkDevice device;
        VkBufferCreateInfo createInfo;
        bool external = false;
        VkExternalMemoryBufferCreateInfo externalCreateInfo;
        VkDeviceMemory currentBacking = VK_NULL_HANDLE;
        VkDeviceSize currentBackingOffset = 0;
        VkDeviceSize currentBackingSize = 0;
        bool baseRequirementsKnown = false;
        VkMemoryRequirements baseRequirements;
    };

    struct VkSemaphore_Info {
        VkDevice device;
        zx_handle_t eventHandle = ZX_HANDLE_INVALID;
        int syncFd = -1;
    };

    struct VkDescriptorUpdateTemplate_Info {
        std::vector<VkDescriptorUpdateTemplateEntry> templateEntries;

        // Flattened versions
        std::vector<uint32_t> imageInfoEntryIndices;
        std::vector<uint32_t> bufferInfoEntryIndices;
        std::vector<uint32_t> bufferViewEntryIndices;
        std::vector<VkDescriptorImageInfo> imageInfos;
        std::vector<VkDescriptorBufferInfo> bufferInfos;
        std::vector<VkBufferView> bufferViews;
    };

    struct VkFence_Info {
        VkDevice device;
        bool external = false;
        VkExportFenceCreateInfo exportFenceCreateInfo;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        int syncFd = -1;
#endif
    };

    struct VkDescriptorPool_Info {
        std::unordered_set<VkDescriptorSet> allocedSets;
        VkDescriptorPoolCreateFlags createFlags;
    };

    struct VkDescriptorSet_Info {
        VkDescriptorPool pool;
        std::vector<bool> bindingIsImmutableSampler;
    };

    struct VkDescriptorSetLayout_Info {
        std::vector<VkDescriptorSetLayoutBinding> bindings;
    };

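    // register_<Type> (and the info_<Type> table itself) is generated for
    // every handle type; unregister_<Type> is generated only for the trivial
    // types. Handles that own other resources (instances, devices, memory,
    // fences, ...) get the hand-written unregister_ methods below.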
#define HANDLE_REGISTER_IMPL_IMPL(type) \
    std::unordered_map<type, type##_Info> info_##type; \
    void register_##type(type obj) { \
        AutoLock lock(mLock); \
        info_##type[obj] = type##_Info(); \
    } \

#define HANDLE_UNREGISTER_IMPL_IMPL(type) \
    void unregister_##type(type obj) { \
        AutoLock lock(mLock); \
        info_##type.erase(obj); \
    } \

    GOLDFISH_VK_LIST_HANDLE_TYPES(HANDLE_REGISTER_IMPL_IMPL)
    GOLDFISH_VK_LIST_TRIVIAL_HANDLE_TYPES(HANDLE_UNREGISTER_IMPL_IMPL)

    void unregister_VkInstance(VkInstance instance) {
        AutoLock lock(mLock);

        auto it = info_VkInstance.find(instance);
        if (it == info_VkInstance.end()) return;
        auto info = it->second;
        info_VkInstance.erase(instance);
        lock.unlock();
    }

    void unregister_VkDevice(VkDevice device) {
        AutoLock lock(mLock);

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return;
        auto info = it->second;
        info_VkDevice.erase(device);
        lock.unlock();
    }

    void unregister_VkCommandBuffer(VkCommandBuffer commandBuffer) {
        AutoLock lock(mLock);

        auto it = info_VkCommandBuffer.find(commandBuffer);
        if (it == info_VkCommandBuffer.end()) return;
        auto& info = it->second;
        auto lastUsedEncoder =
            info.lastUsedEncoderPtr ?
            *(info.lastUsedEncoderPtr) : nullptr;

        if (lastUsedEncoder) {
            lastUsedEncoder->unregisterCleanupCallback(commandBuffer);
            delete info.lastUsedEncoderPtr;
        }

        info_VkCommandBuffer.erase(commandBuffer);
    }

    void unregister_VkDeviceMemory(VkDeviceMemory mem) {
        AutoLock lock(mLock);

        auto it = info_VkDeviceMemory.find(mem);
        if (it == info_VkDeviceMemory.end()) return;

        auto& memInfo = it->second;

        if (memInfo.ahw) {
            AHardwareBuffer_release(memInfo.ahw);
        }

        if (memInfo.vmoHandle != ZX_HANDLE_INVALID) {
            zx_handle_close(memInfo.vmoHandle);
        }

        if (memInfo.mappedPtr &&
            !memInfo.virtualHostVisibleBacking &&
            !memInfo.directMapped) {
            aligned_buf_free(memInfo.mappedPtr);
        }

        if (memInfo.directMapped) {
            subFreeHostMemory(&memInfo.subAlloc);
        }

        delete memInfo.goldfishAddressSpaceBlock;

        info_VkDeviceMemory.erase(mem);
    }

    void unregister_VkImage(VkImage img) {
        AutoLock lock(mLock);

        auto it = info_VkImage.find(img);
        if (it == info_VkImage.end()) return;

        auto& imageInfo = it->second;
        (void)imageInfo;

        info_VkImage.erase(img);
    }

    void unregister_VkBuffer(VkBuffer buf) {
        AutoLock lock(mLock);

        auto it = info_VkBuffer.find(buf);
        if (it == info_VkBuffer.end()) return;

        info_VkBuffer.erase(buf);
    }

    void unregister_VkSemaphore(VkSemaphore sem) {
        AutoLock lock(mLock);

        auto it = info_VkSemaphore.find(sem);
        if (it == info_VkSemaphore.end()) return;

        auto& semInfo = it->second;

        if (semInfo.eventHandle != ZX_HANDLE_INVALID) {
            zx_handle_close(semInfo.eventHandle);
        }

        info_VkSemaphore.erase(sem);
    }

    void unregister_VkDescriptorUpdateTemplate(VkDescriptorUpdateTemplate templ) {
        info_VkDescriptorUpdateTemplate.erase(templ);
    }

    void unregister_VkFence(VkFence fence) {
        AutoLock lock(mLock);
        auto it = info_VkFence.find(fence);
        if (it == info_VkFence.end()) return;

        auto& fenceInfo = it->second;
        (void)fenceInfo;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (fenceInfo.syncFd >= 0) {
            close(fenceInfo.syncFd);
        }
#endif

        info_VkFence.erase(fence);
    }

    void unregister_VkDescriptorSet_locked(VkDescriptorSet set) {
        auto it = info_VkDescriptorSet.find(set);
        if (it == info_VkDescriptorSet.end()) return;

        const auto& setInfo = it->second;

        auto poolIt = info_VkDescriptorPool.find(setInfo.pool);

        info_VkDescriptorSet.erase(set);

        if (poolIt == info_VkDescriptorPool.end()) return;

        auto& poolInfo = poolIt->second;
        poolInfo.allocedSets.erase(set);
    }

    void unregister_VkDescriptorSet(VkDescriptorSet set) {
        AutoLock lock(mLock);
        unregister_VkDescriptorSet_locked(set);
    }

    void unregister_VkDescriptorSetLayout(VkDescriptorSetLayout setLayout) {
        AutoLock lock(mLock);
        info_VkDescriptorSetLayout.erase(setLayout);
    }

    void initDescriptorSetStateLocked(const VkDescriptorSetAllocateInfo* ci, const VkDescriptorSet* sets) {
        auto it = info_VkDescriptorPool.find(ci->descriptorPool);
        if (it == info_VkDescriptorPool.end()) return;

        auto& info = it->second;
        for (uint32_t i = 0; i < ci->descriptorSetCount; ++i) {
            info.allocedSets.insert(sets[i]);

            auto setIt = info_VkDescriptorSet.find(sets[i]);
            if (setIt == info_VkDescriptorSet.end()) continue;

            auto& setInfo = setIt->second;
            setInfo.pool = ci->descriptorPool;

            VkDescriptorSetLayout setLayout = ci->pSetLayouts[i];
            auto layoutIt = info_VkDescriptorSetLayout.find(setLayout);
            if (layoutIt == info_VkDescriptorSetLayout.end()) continue;

            const auto& layoutInfo = layoutIt->second;
            for (size_t b = 0; b < layoutInfo.bindings.size(); ++b) {
                // Bindings can be sparsely defined
                const auto& binding = layoutInfo.bindings[b];
                uint32_t bindingIndex = binding.binding;
                if (setInfo.bindingIsImmutableSampler.size() <= bindingIndex) {
                    setInfo.bindingIsImmutableSampler.resize(bindingIndex + 1, false);
                }
                setInfo.bindingIsImmutableSampler[bindingIndex] =
                    binding.descriptorCount > 0 &&
                    (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
                     binding.descriptorType ==
                         VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
                    binding.pImmutableSamplers;
            }
        }
    }

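    // Per the Vulkan spec, descriptor writes to bindings created with
    // immutable samplers must have their VkDescriptorImageInfo::sampler
    // ignored. The helper below returns a copy of the write with those
    // sampler handles nulled out, presumably so stale guest sampler handles
    // never reach the host decoder.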
    VkWriteDescriptorSet
    createImmutableSamplersFilteredWriteDescriptorSetLocked(
        const VkWriteDescriptorSet* descriptorWrite,
        std::vector<VkDescriptorImageInfo>* imageInfoArray) {

        VkWriteDescriptorSet res = *descriptorWrite;

        if (descriptorWrite->descriptorCount == 0) return res;

        if (descriptorWrite->descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER &&
            descriptorWrite->descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) return res;

        VkDescriptorSet set = descriptorWrite->dstSet;
        auto descSetIt = info_VkDescriptorSet.find(set);
        if (descSetIt == info_VkDescriptorSet.end()) {
            ALOGE("%s: error: descriptor set 0x%llx not found\n", __func__,
                  (unsigned long long)set);
            return res;
        }

        const auto& descInfo = descSetIt->second;
        uint32_t binding = descriptorWrite->dstBinding;

        // Guard against bindings never recorded in initDescriptorSetStateLocked.
        bool immutableSampler =
            binding < descInfo.bindingIsImmutableSampler.size() &&
            descInfo.bindingIsImmutableSampler[binding];

        if (!immutableSampler) return res;

        for (uint32_t i = 0; i < descriptorWrite->descriptorCount; ++i) {
            VkDescriptorImageInfo imageInfo = descriptorWrite->pImageInfo[i];
            imageInfo.sampler = VK_NULL_HANDLE;
            imageInfoArray->push_back(imageInfo);
        }

        res.pImageInfo = imageInfoArray->data();

        return res;
    }

    // Also unregisters underlying descriptor sets
    // and deletes their guest-side wrapped handles.
    void clearDescriptorPoolLocked(VkDescriptorPool pool) {
        auto it = info_VkDescriptorPool.find(pool);
        if (it == info_VkDescriptorPool.end()) return;

        std::vector<VkDescriptorSet> toClear;
        for (auto set : it->second.allocedSets) {
            toClear.push_back(set);
        }

        for (auto set : toClear) {
            unregister_VkDescriptorSet_locked(set);
            delete_goldfish_VkDescriptorSet(set);
        }
    }

    void unregister_VkDescriptorPool(VkDescriptorPool pool) {
        AutoLock lock(mLock);
        clearDescriptorPoolLocked(pool);
        info_VkDescriptorPool.erase(pool);
    }

    bool descriptorPoolSupportsIndividualFreeLocked(VkDescriptorPool pool) {
        auto it = info_VkDescriptorPool.find(pool);
        if (it == info_VkDescriptorPool.end()) return false;

        const auto& info = it->second;

        return VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT &
               info.createFlags;
    }

    bool descriptorSetReallyAllocedFromPoolLocked(VkDescriptorSet set, VkDescriptorPool pool) {
        auto it = info_VkDescriptorSet.find(set);
        if (it == info_VkDescriptorSet.end()) return false;

        const auto& info = it->second;

        if (pool != info.pool) return false;

        auto poolIt = info_VkDescriptorPool.find(info.pool);
        if (poolIt == info_VkDescriptorPool.end()) return false;

        const auto& poolInfo = poolIt->second;

        if (poolInfo.allocedSets.find(set) == poolInfo.allocedSets.end()) return false;

        return true;
    }

    static constexpr uint32_t kDefaultApiVersion = VK_MAKE_VERSION(1, 1, 0);

    void setInstanceInfo(VkInstance instance,
                         uint32_t enabledExtensionCount,
                         const char* const* ppEnabledExtensionNames,
                         uint32_t apiVersion) {
        AutoLock lock(mLock);
        auto& info = info_VkInstance[instance];
        info.highestApiVersion = apiVersion;

        if (!ppEnabledExtensionNames) return;

        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
            info.enabledExtensions.insert(ppEnabledExtensionNames[i]);
        }
    }

    void setDeviceInfo(VkDevice device,
                       VkPhysicalDevice physdev,
                       VkPhysicalDeviceProperties props,
                       VkPhysicalDeviceMemoryProperties memProps,
                       uint32_t enabledExtensionCount,
                       const char* const* ppEnabledExtensionNames) {
        AutoLock lock(mLock);
        auto& info = info_VkDevice[device];
        info.physdev = physdev;
        info.props = props;
        info.memProps = memProps;
        initHostVisibleMemoryVirtualizationInfo(
            physdev, &memProps,
            mFeatureInfo.get(),
            &mHostVisibleMemoryVirtInfo);
        info.apiVersion = props.apiVersion;

        if (!ppEnabledExtensionNames) return;

        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
            info.enabledExtensions.insert(ppEnabledExtensionNames[i]);
        }
    }

    void setDeviceMemoryInfo(VkDevice device,
                             VkDeviceMemory memory,
                             VkDeviceSize allocationSize,
                             VkDeviceSize mappedSize,
                             uint8_t* ptr,
                             uint32_t memoryTypeIndex,
                             AHardwareBuffer* ahw = nullptr,
                             zx_handle_t vmoHandle = ZX_HANDLE_INVALID) {
        AutoLock lock(mLock);
        auto& deviceInfo = info_VkDevice[device];
        (void)deviceInfo;
        auto& info = info_VkDeviceMemory[memory];

        info.allocationSize = allocationSize;
        info.mappedSize = mappedSize;
        info.mappedPtr = ptr;
        info.memoryTypeIndex = memoryTypeIndex;
        info.ahw = ahw;
        info.vmoHandle = vmoHandle;
    }

    void setImageInfo(VkImage image,
                      VkDevice device,
                      const VkImageCreateInfo *pCreateInfo) {
        AutoLock lock(mLock);
        auto& info = info_VkImage[image];

        info.device = device;
        info.createInfo = *pCreateInfo;
    }

    bool isMemoryTypeHostVisible(VkDevice device, uint32_t typeIndex) const {
        AutoLock lock(mLock);
        const auto it = info_VkDevice.find(device);

        if (it == info_VkDevice.end()) return false;

        const auto& info = it->second;
        return info.memProps.memoryTypes[typeIndex].propertyFlags &
               VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }

    uint8_t* getMappedPointer(VkDeviceMemory memory) {
        AutoLock lock(mLock);
        const auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) return nullptr;

        const auto& info = it->second;
        return info.mappedPtr;
    }

    VkDeviceSize getMappedSize(VkDeviceMemory memory) {
        AutoLock lock(mLock);
        const auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) return 0;

        const auto& info = it->second;
        return info.mappedSize;
    }

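    // Rounds a size up to a whole number of nonCoherentAtomSize units, as
    // needed when flushing/invalidating non-coherent mapped ranges. For
    // example, with nonCoherentAtomSize = 64, basicSize = 100 spans two
    // atoms and extends to 128.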
    VkDeviceSize getNonCoherentExtendedSize(VkDevice device, VkDeviceSize basicSize) const {
        AutoLock lock(mLock);
        const auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return basicSize;
        const auto& info = it->second;

        VkDeviceSize nonCoherentAtomSize =
            info.props.limits.nonCoherentAtomSize;
        VkDeviceSize atoms =
            (basicSize + nonCoherentAtomSize - 1) / nonCoherentAtomSize;
        return atoms * nonCoherentAtomSize;
    }

    bool isValidMemoryRange(const VkMappedMemoryRange& range) const {
        AutoLock lock(mLock);
        const auto it = info_VkDeviceMemory.find(range.memory);
        if (it == info_VkDeviceMemory.end()) return false;
        const auto& info = it->second;

        if (!info.mappedPtr) return false;

        VkDeviceSize offset = range.offset;
        VkDeviceSize size = range.size;

        if (size == VK_WHOLE_SIZE) {
            return offset <= info.mappedSize;
        }

        return offset + size <= info.mappedSize;
    }

    void setupFeatures(const EmulatorFeatureInfo* features) {
        if (!features || mFeatureInfo) return;
        mFeatureInfo.reset(new EmulatorFeatureInfo);
        *mFeatureInfo = *features;

        if (mFeatureInfo->hasDirectMem) {
            mGoldfishAddressSpaceBlockProvider.reset(
                new GoldfishAddressSpaceBlockProvider(
                    GoldfishAddressSpaceSubdeviceType::NoSubdevice));
        }

#ifdef VK_USE_PLATFORM_FUCHSIA
        if (mFeatureInfo->hasVulkan) {
            zx::channel channel(GetConnectToServiceFunction()("/dev/class/goldfish-control/000"));
            if (!channel) {
                ALOGE("failed to open control device");
                abort();
            }
            mControlDevice.Bind(std::move(channel));

            zx::channel sysmem_channel(GetConnectToServiceFunction()("/svc/fuchsia.sysmem.Allocator"));
            if (!sysmem_channel) {
                ALOGE("failed to open sysmem connection");
            }
            mSysmemAllocator.Bind(std::move(sysmem_channel));
        }
#endif

        if (mFeatureInfo->hasVulkanNullOptionalStrings) {
            mStreamFeatureBits |= VULKAN_STREAM_FEATURE_NULL_OPTIONAL_STRINGS_BIT;
        }
        if (mFeatureInfo->hasVulkanIgnoredHandles) {
            mStreamFeatureBits |= VULKAN_STREAM_FEATURE_IGNORED_HANDLES_BIT;
        }

#if !defined(HOST_BUILD) && defined(VK_USE_PLATFORM_ANDROID_KHR)
        if (mFeatureInfo->hasVirtioGpuNext) {
            ALOGD("%s: has virtio-gpu-next; create hostmem rendernode\n", __func__);
            mRendernodeFd = drmOpenRender(128 /* RENDERNODE_MINOR */);
        }
#endif
    }

    void setThreadingCallbacks(const ResourceTracker::ThreadingCallbacks& callbacks) {
        mThreadingCallbacks = callbacks;
    }

    bool hostSupportsVulkan() const {
        if (!mFeatureInfo) return false;

        return mFeatureInfo->hasVulkan;
    }

    bool usingDirectMapping() const {
        return mHostVisibleMemoryVirtInfo.virtualizationSupported;
    }

    uint32_t getStreamFeatures() const {
        return mStreamFeatureBits;
    }

    bool supportsDeferredCommands() const {
        if (!mFeatureInfo) return false;
        return mFeatureInfo->hasDeferredVulkanCommands;
    }

    bool supportsCreateResourcesWithRequirements() const {
        if (!mFeatureInfo) return false;
        return mFeatureInfo->hasVulkanCreateResourcesWithRequirements;
    }

    int getHostInstanceExtensionIndex(const std::string& extName) const {
        int i = 0;
        for (const auto& prop : mHostInstanceExtensions) {
            if (extName == std::string(prop.extensionName)) {
                return i;
            }
            ++i;
        }
        return -1;
    }

    int getHostDeviceExtensionIndex(const std::string& extName) const {
        int i = 0;
        for (const auto& prop : mHostDeviceExtensions) {
            if (extName == std::string(prop.extensionName)) {
                return i;
            }
            ++i;
        }
        return -1;
    }

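    // With host-visible memory virtualization, guest allocations are
    // sub-allocated out of larger host-owned blocks, and the guest advertises
    // a remapped set of memory types. Memory handles, offsets, sizes, type
    // indices, and type bitmasks therefore all need translation at the
    // guest/host boundary; the two transforms below handle the guest-to-host
    // and host-to-guest directions.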
    void deviceMemoryTransform_tohost(
        VkDeviceMemory* memory, uint32_t memoryCount,
        VkDeviceSize* offset, uint32_t offsetCount,
        VkDeviceSize* size, uint32_t sizeCount,
        uint32_t* typeIndex, uint32_t typeIndexCount,
        uint32_t* typeBits, uint32_t typeBitsCount) {

        (void)memoryCount;
        (void)offsetCount;
        (void)sizeCount;

        const auto& hostVirt =
            mHostVisibleMemoryVirtInfo;

        if (!hostVirt.virtualizationSupported) return;

        if (memory) {
            AutoLock lock(mLock);

            for (uint32_t i = 0; i < memoryCount; ++i) {
                VkDeviceMemory mem = memory[i];

                auto it = info_VkDeviceMemory.find(mem);
                if (it == info_VkDeviceMemory.end()) return;

                const auto& info = it->second;

                if (!info.directMapped) continue;

                memory[i] = info.subAlloc.baseMemory;

                if (offset) {
                    offset[i] = info.subAlloc.baseOffset + offset[i];
                }

                if (size) {
                    if (size[i] == VK_WHOLE_SIZE) {
                        size[i] = info.subAlloc.subMappedSize;
                    }
                }

                // TODO
                (void)memory;
                (void)offset;
                (void)size;
            }
        }

        for (uint32_t i = 0; i < typeIndexCount; ++i) {
            typeIndex[i] =
                hostVirt.memoryTypeIndexMappingToHost[typeIndex[i]];
        }

        for (uint32_t i = 0; i < typeBitsCount; ++i) {
            uint32_t bits = 0;
            for (uint32_t j = 0; j < VK_MAX_MEMORY_TYPES; ++j) {
                bool guestHas = typeBits[i] & (1 << j);
                uint32_t hostIndex =
                    hostVirt.memoryTypeIndexMappingToHost[j];
                bits |= guestHas ? (1 << hostIndex) : 0;
            }
            typeBits[i] = bits;
        }
    }

    void deviceMemoryTransform_fromhost(
        VkDeviceMemory* memory, uint32_t memoryCount,
        VkDeviceSize* offset, uint32_t offsetCount,
        VkDeviceSize* size, uint32_t sizeCount,
        uint32_t* typeIndex, uint32_t typeIndexCount,
        uint32_t* typeBits, uint32_t typeBitsCount) {

        (void)memoryCount;
        (void)offsetCount;
        (void)sizeCount;

        const auto& hostVirt =
            mHostVisibleMemoryVirtInfo;

        if (!hostVirt.virtualizationSupported) return;

        AutoLock lock(mLock);

        for (uint32_t i = 0; i < memoryCount; ++i) {
            // TODO
            (void)memory;
            (void)offset;
            (void)size;
        }

        for (uint32_t i = 0; i < typeIndexCount; ++i) {
            typeIndex[i] =
                hostVirt.memoryTypeIndexMappingFromHost[typeIndex[i]];
        }

        for (uint32_t i = 0; i < typeBitsCount; ++i) {
            uint32_t bits = 0;
            for (uint32_t j = 0; j < VK_MAX_MEMORY_TYPES; ++j) {
                bool hostHas = typeBits[i] & (1 << j);
                uint32_t guestIndex =
                    hostVirt.memoryTypeIndexMappingFromHost[j];
                bits |= hostHas ? (1 << guestIndex) : 0;

                if (hostVirt.memoryTypeBitsShouldAdvertiseBoth[j]) {
                    bits |= hostHas ? (1 << j) : 0;
                }
            }
            typeBits[i] = bits;
        }
    }

    VkResult on_vkEnumerateInstanceExtensionProperties(
        void* context,
        VkResult,
        const char*,
        uint32_t* pPropertyCount,
        VkExtensionProperties* pProperties) {
        std::vector<const char*> allowedExtensionNames = {
            "VK_KHR_get_physical_device_properties2",
            "VK_KHR_sampler_ycbcr_conversion",
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            "VK_KHR_external_semaphore_capabilities",
            "VK_KHR_external_memory_capabilities",
            "VK_KHR_external_fence_capabilities",
#endif
            // TODO:
            // VK_KHR_external_memory_capabilities
        };

        VkEncoder* enc = (VkEncoder*)context;

        // Only advertise a select set of extensions.
        if (mHostInstanceExtensions.empty()) {
            uint32_t hostPropCount = 0;
            enc->vkEnumerateInstanceExtensionProperties(nullptr, &hostPropCount, nullptr);
            mHostInstanceExtensions.resize(hostPropCount);

            VkResult hostRes =
                enc->vkEnumerateInstanceExtensionProperties(
                    nullptr, &hostPropCount, mHostInstanceExtensions.data());

            if (hostRes != VK_SUCCESS) {
                return hostRes;
            }
        }

        std::vector<VkExtensionProperties> filteredExts;

        for (size_t i = 0; i < allowedExtensionNames.size(); ++i) {
            auto extIndex = getHostInstanceExtensionIndex(allowedExtensionNames[i]);
            if (extIndex != -1) {
                filteredExts.push_back(mHostInstanceExtensions[extIndex]);
            }
        }

        VkExtensionProperties anbExtProps[] = {
#ifdef VK_USE_PLATFORM_FUCHSIA
            { "VK_KHR_external_memory_capabilities", 1},
            { "VK_KHR_external_semaphore_capabilities", 1},
#endif
        };

        for (auto& anbExtProp: anbExtProps) {
            filteredExts.push_back(anbExtProp);
        }

        // Spec:
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceExtensionProperties.html
        //
        // If pProperties is NULL, then the number of extensions properties
        // available is returned in pPropertyCount. Otherwise, pPropertyCount
        // must point to a variable set by the user to the number of elements
        // in the pProperties array, and on return the variable is overwritten
        // with the number of structures actually written to pProperties. If
        // pPropertyCount is less than the number of extension properties
        // available, at most pPropertyCount structures will be written. If
        // pPropertyCount is smaller than the number of extensions available,
        // VK_INCOMPLETE will be returned instead of VK_SUCCESS, to indicate
        // that not all the available properties were returned.
        //
        // pPropertyCount must be a valid pointer to a uint32_t value
        if (!pPropertyCount) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pProperties) {
            *pPropertyCount = (uint32_t)filteredExts.size();
            return VK_SUCCESS;
        } else {
            auto actualExtensionCount = (uint32_t)filteredExts.size();
            if (*pPropertyCount > actualExtensionCount) {
                *pPropertyCount = actualExtensionCount;
            }

            for (uint32_t i = 0; i < *pPropertyCount; ++i) {
                pProperties[i] = filteredExts[i];
            }

            if (actualExtensionCount > *pPropertyCount) {
                return VK_INCOMPLETE;
            }

            return VK_SUCCESS;
        }
    }

    VkResult on_vkEnumerateDeviceExtensionProperties(
        void* context,
        VkResult,
        VkPhysicalDevice physdev,
        const char*,
        uint32_t* pPropertyCount,
        VkExtensionProperties* pProperties) {

        std::vector<const char*> allowedExtensionNames = {
            "VK_KHR_maintenance1",
            "VK_KHR_maintenance2",
            "VK_KHR_maintenance3",
            "VK_KHR_get_memory_requirements2",
            "VK_KHR_dedicated_allocation",
            "VK_KHR_bind_memory2",
            "VK_KHR_sampler_ycbcr_conversion",
            "VK_KHR_shader_float16_int8",
            "VK_AMD_gpu_shader_half_float",
            "VK_NV_shader_subgroup_partitioned",
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            "VK_KHR_external_semaphore",
            "VK_KHR_external_semaphore_fd",
            // "VK_KHR_external_semaphore_win32", not exposed because it's translated to fd
            "VK_KHR_external_memory",
            "VK_KHR_external_fence",
            "VK_KHR_external_fence_fd",
#endif
            // TODO:
            // VK_KHR_external_memory_capabilities
        };

        VkEncoder* enc = (VkEncoder*)context;

        if (mHostDeviceExtensions.empty()) {
            uint32_t hostPropCount = 0;
            enc->vkEnumerateDeviceExtensionProperties(physdev, nullptr, &hostPropCount, nullptr);
            mHostDeviceExtensions.resize(hostPropCount);

            VkResult hostRes =
                enc->vkEnumerateDeviceExtensionProperties(
                    physdev, nullptr, &hostPropCount, mHostDeviceExtensions.data());

            if (hostRes != VK_SUCCESS) {
                return hostRes;
            }
        }

        bool hostHasWin32ExternalSemaphore =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_semaphore_win32") != -1;

        bool hostHasPosixExternalSemaphore =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_semaphore_fd") != -1;

        ALOGD("%s: host has ext semaphore? win32 %d posix %d\n", __func__,
              hostHasWin32ExternalSemaphore,
              hostHasPosixExternalSemaphore);

        bool hostSupportsExternalSemaphore =
            hostHasWin32ExternalSemaphore ||
            hostHasPosixExternalSemaphore;

        std::vector<VkExtensionProperties> filteredExts;

        for (size_t i = 0; i < allowedExtensionNames.size(); ++i) {
            auto extIndex = getHostDeviceExtensionIndex(allowedExtensionNames[i]);
            if (extIndex != -1) {
                filteredExts.push_back(mHostDeviceExtensions[extIndex]);
            }
        }

        VkExtensionProperties anbExtProps[] = {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            { "VK_ANDROID_native_buffer", 7 },
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            { "VK_KHR_external_memory", 1 },
            { "VK_KHR_external_semaphore", 1 },
            { "VK_FUCHSIA_external_semaphore", 1 },
            { "VK_FUCHSIA_buffer_collection", 1 },
#endif
        };

        for (auto& anbExtProp: anbExtProps) {
            filteredExts.push_back(anbExtProp);
        }

        if (hostSupportsExternalSemaphore &&
            !hostHasPosixExternalSemaphore) {
            filteredExts.push_back(
                { "VK_KHR_external_semaphore_fd", 1});
        }

        bool win32ExtMemAvailable =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_memory_win32") != -1;
        bool posixExtMemAvailable =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_memory_fd") != -1;
        bool extMoltenVkAvailable =
            getHostDeviceExtensionIndex(
                "VK_MVK_moltenvk") != -1;

        bool hostHasExternalMemorySupport =
            win32ExtMemAvailable || posixExtMemAvailable || extMoltenVkAvailable;

        if (hostHasExternalMemorySupport) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            filteredExts.push_back({
                "VK_ANDROID_external_memory_android_hardware_buffer", 7
            });
            filteredExts.push_back({
                "VK_EXT_queue_family_foreign", 1
            });
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            filteredExts.push_back({
                "VK_FUCHSIA_external_memory", 1
            });
#endif
        }

        // Spec:
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateDeviceExtensionProperties.html
        //
        // pPropertyCount is a pointer to an integer related to the number of
        // extension properties available or queried, and is treated in the
        // same fashion as the
        // vkEnumerateInstanceExtensionProperties::pPropertyCount parameter.
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceExtensionProperties.html
        //
        // If pProperties is NULL, then the number of extensions properties
        // available is returned in pPropertyCount. Otherwise, pPropertyCount
        // must point to a variable set by the user to the number of elements
        // in the pProperties array, and on return the variable is overwritten
        // with the number of structures actually written to pProperties. If
        // pPropertyCount is less than the number of extension properties
        // available, at most pPropertyCount structures will be written. If
        // pPropertyCount is smaller than the number of extensions available,
        // VK_INCOMPLETE will be returned instead of VK_SUCCESS, to indicate
        // that not all the available properties were returned.
        //
        // pPropertyCount must be a valid pointer to a uint32_t value

        if (!pPropertyCount) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pProperties) {
            *pPropertyCount = (uint32_t)filteredExts.size();
            return VK_SUCCESS;
        } else {
            auto actualExtensionCount = (uint32_t)filteredExts.size();
            if (*pPropertyCount > actualExtensionCount) {
                *pPropertyCount = actualExtensionCount;
            }

            for (uint32_t i = 0; i < *pPropertyCount; ++i) {
                pProperties[i] = filteredExts[i];
            }

            if (actualExtensionCount > *pPropertyCount) {
                return VK_INCOMPLETE;
            }

            return VK_SUCCESS;
        }
    }

    VkResult on_vkEnumeratePhysicalDevices(
        void* context, VkResult,
        VkInstance instance, uint32_t* pPhysicalDeviceCount,
        VkPhysicalDevice* pPhysicalDevices) {

        VkEncoder* enc = (VkEncoder*)context;

        if (!instance) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pPhysicalDeviceCount) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        // When this function is called, we actually need to do two things:
        // - Get full information about physical devices from the host,
        //   even if the guest did not ask for it
        // - Serve the guest query according to the spec:
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumeratePhysicalDevices.html

        auto it = info_VkInstance.find(instance);

        if (it == info_VkInstance.end()) return VK_ERROR_INITIALIZATION_FAILED;

        auto& info = it->second;

        // Get the full host information here if it doesn't exist already.
        if (info.physicalDevices.empty()) {
            uint32_t hostPhysicalDeviceCount = 0;

            lock.unlock();
            VkResult countRes = enc->vkEnumeratePhysicalDevices(
                instance, &hostPhysicalDeviceCount, nullptr);
            lock.lock();

            if (countRes != VK_SUCCESS) {
                ALOGE("%s: failed: could not count host physical devices. "
                      "Error %d\n", __func__, countRes);
                return countRes;
            }

            info.physicalDevices.resize(hostPhysicalDeviceCount);

            lock.unlock();
            VkResult enumRes = enc->vkEnumeratePhysicalDevices(
                instance, &hostPhysicalDeviceCount, info.physicalDevices.data());
            lock.lock();

            if (enumRes != VK_SUCCESS) {
                ALOGE("%s: failed: could not retrieve host physical devices. "
                      "Error %d\n", __func__, enumRes);
                return enumRes;
            }
        }

        // Serve the guest query according to the spec.
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumeratePhysicalDevices.html
        //
        // If pPhysicalDevices is NULL, then the number of physical devices
        // available is returned in pPhysicalDeviceCount. Otherwise,
        // pPhysicalDeviceCount must point to a variable set by the user to the
        // number of elements in the pPhysicalDevices array, and on return the
        // variable is overwritten with the number of handles actually written
        // to pPhysicalDevices. If pPhysicalDeviceCount is less than the number
        // of physical devices available, at most pPhysicalDeviceCount
        // structures will be written. If pPhysicalDeviceCount is smaller than
        // the number of physical devices available, VK_INCOMPLETE will be
        // returned instead of VK_SUCCESS, to indicate that not all the
        // available physical devices were returned.

        if (!pPhysicalDevices) {
            *pPhysicalDeviceCount = (uint32_t)info.physicalDevices.size();
            return VK_SUCCESS;
        } else {
            uint32_t actualDeviceCount = (uint32_t)info.physicalDevices.size();
            uint32_t toWrite = actualDeviceCount < *pPhysicalDeviceCount ? actualDeviceCount : *pPhysicalDeviceCount;

            for (uint32_t i = 0; i < toWrite; ++i) {
                pPhysicalDevices[i] = info.physicalDevices[i];
            }

            *pPhysicalDeviceCount = toWrite;

            if (actualDeviceCount > *pPhysicalDeviceCount) {
                return VK_INCOMPLETE;
            }

            return VK_SUCCESS;
        }
    }

    void on_vkGetPhysicalDeviceMemoryProperties(
        void*,
        VkPhysicalDevice physdev,
        VkPhysicalDeviceMemoryProperties* out) {

        initHostVisibleMemoryVirtualizationInfo(
            physdev,
            out,
            mFeatureInfo.get(),
            &mHostVisibleMemoryVirtInfo);

        if (mHostVisibleMemoryVirtInfo.virtualizationSupported) {
            *out = mHostVisibleMemoryVirtInfo.guestMemoryProperties;
        }
    }

    void on_vkGetPhysicalDeviceMemoryProperties2(
        void*,
        VkPhysicalDevice physdev,
        VkPhysicalDeviceMemoryProperties2* out) {

        initHostVisibleMemoryVirtualizationInfo(
            physdev,
            &out->memoryProperties,
            mFeatureInfo.get(),
            &mHostVisibleMemoryVirtInfo);

        if (mHostVisibleMemoryVirtInfo.virtualizationSupported) {
            out->memoryProperties = mHostVisibleMemoryVirtInfo.guestMemoryProperties;
        }
    }

    VkResult on_vkCreateInstance(
        void* context,
        VkResult input_result,
        const VkInstanceCreateInfo* createInfo,
        const VkAllocationCallbacks*,
        VkInstance* pInstance) {

        if (input_result != VK_SUCCESS) return input_result;

        VkEncoder* enc = (VkEncoder*)context;

        uint32_t apiVersion;
        VkResult enumInstanceVersionRes =
            enc->vkEnumerateInstanceVersion(&apiVersion);
        (void)enumInstanceVersionRes;

        setInstanceInfo(
            *pInstance,
            createInfo->enabledExtensionCount,
            createInfo->ppEnabledExtensionNames,
            apiVersion);

        return input_result;
    }

    VkResult on_vkCreateDevice(
        void* context,
        VkResult input_result,
        VkPhysicalDevice physicalDevice,
        const VkDeviceCreateInfo* pCreateInfo,
        const VkAllocationCallbacks*,
        VkDevice* pDevice) {

        if (input_result != VK_SUCCESS) return input_result;

        VkEncoder* enc = (VkEncoder*)context;

        VkPhysicalDeviceProperties props;
        VkPhysicalDeviceMemoryProperties memProps;
        enc->vkGetPhysicalDeviceProperties(physicalDevice, &props);
        enc->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);

        setDeviceInfo(
            *pDevice, physicalDevice, props, memProps,
            pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);

        return input_result;
    }

    void on_vkDestroyDevice_pre(
        void* context,
        VkDevice device,
        const VkAllocationCallbacks*) {

        AutoLock lock(mLock);

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return;
        auto info = it->second;

        lock.unlock();

        VkEncoder* enc = (VkEncoder*)context;

        bool freeMemorySyncSupported =
            mFeatureInfo->hasVulkanFreeMemorySync;
        for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
            for (auto& block : info.hostMemBlocks[i]) {
                destroyHostMemAlloc(
                    freeMemorySyncSupported,
                    enc, device, &block);
            }
        }
    }

    VkResult on_vkGetAndroidHardwareBufferPropertiesANDROID(
        void*, VkResult,
        VkDevice device,
        const AHardwareBuffer* buffer,
        VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
        auto grallocHelper =
            mThreadingCallbacks.hostConnectionGetFunc()->grallocHelper();
        return getAndroidHardwareBufferPropertiesANDROID(
            grallocHelper,
            &mHostVisibleMemoryVirtInfo,
            device, buffer, pProperties);
    }

    VkResult on_vkGetMemoryAndroidHardwareBufferANDROID(
        void*, VkResult,
        VkDevice device,
        const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
        struct AHardwareBuffer** pBuffer) {

        if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
        if (!pInfo->memory) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto memoryIt = info_VkDeviceMemory.find(pInfo->memory);

        if (memoryIt == info_VkDeviceMemory.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = memoryIt->second;

        VkResult queryRes =
            getMemoryAndroidHardwareBufferANDROID(&info.ahw);

        if (queryRes != VK_SUCCESS) return queryRes;

        *pBuffer = info.ahw;

        return queryRes;
    }

#ifdef VK_USE_PLATFORM_FUCHSIA
on_vkGetMemoryZirconHandleFUCHSIA(void *,VkResult,VkDevice device,const VkMemoryGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)1483 VkResult on_vkGetMemoryZirconHandleFUCHSIA(
1484 void*, VkResult,
1485 VkDevice device,
1486 const VkMemoryGetZirconHandleInfoFUCHSIA* pInfo,
1487 uint32_t* pHandle) {
1488
1489 if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
1490 if (!pInfo->memory) return VK_ERROR_INITIALIZATION_FAILED;
1491
1492 AutoLock lock(mLock);
1493
1494 auto deviceIt = info_VkDevice.find(device);
1495
1496 if (deviceIt == info_VkDevice.end()) {
1497 return VK_ERROR_INITIALIZATION_FAILED;
1498 }
1499
1500 auto memoryIt = info_VkDeviceMemory.find(pInfo->memory);
1501
1502 if (memoryIt == info_VkDeviceMemory.end()) {
1503 return VK_ERROR_INITIALIZATION_FAILED;
1504 }
1505
1506 auto& info = memoryIt->second;
1507
1508 if (info.vmoHandle == ZX_HANDLE_INVALID) {
1509 ALOGE("%s: memory cannot be exported", __func__);
1510 return VK_ERROR_INITIALIZATION_FAILED;
1511 }
1512
1513 *pHandle = ZX_HANDLE_INVALID;
1514 zx_handle_duplicate(info.vmoHandle, ZX_RIGHT_SAME_RIGHTS, pHandle);
1515 return VK_SUCCESS;
1516 }
1517
on_vkGetMemoryZirconHandlePropertiesFUCHSIA(void *,VkResult,VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,uint32_t handle,VkMemoryZirconHandlePropertiesFUCHSIA * pProperties)1518 VkResult on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
1519 void*, VkResult,
1520 VkDevice device,
1521 VkExternalMemoryHandleTypeFlagBits handleType,
1522 uint32_t handle,
1523 VkMemoryZirconHandlePropertiesFUCHSIA* pProperties) {
1524 if (handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA) {
1525 return VK_ERROR_INITIALIZATION_FAILED;
1526 }
1527
1528 AutoLock lock(mLock);
1529
1530 auto deviceIt = info_VkDevice.find(device);
1531
1532 if (deviceIt == info_VkDevice.end()) {
1533 return VK_ERROR_INITIALIZATION_FAILED;
1534 }
1535
1536 auto& info = deviceIt->second;
1537
1538 // Device local memory type supported.
1539 pProperties->memoryTypeBits = 0;
1540 for (uint32_t i = 0; i < info.memProps.memoryTypeCount; ++i) {
1541 if (info.memProps.memoryTypes[i].propertyFlags &
1542 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
1543 pProperties->memoryTypeBits |= 1ull << i;
1544 }
1545 }
1546 return VK_SUCCESS;
1547 }
1548
on_vkImportSemaphoreZirconHandleFUCHSIA(void *,VkResult,VkDevice device,const VkImportSemaphoreZirconHandleInfoFUCHSIA * pInfo)1549 VkResult on_vkImportSemaphoreZirconHandleFUCHSIA(
1550 void*, VkResult,
1551 VkDevice device,
1552 const VkImportSemaphoreZirconHandleInfoFUCHSIA* pInfo) {
1553
1554 if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
1555 if (!pInfo->semaphore) return VK_ERROR_INITIALIZATION_FAILED;
1556
1557 AutoLock lock(mLock);
1558
1559 auto deviceIt = info_VkDevice.find(device);
1560
1561 if (deviceIt == info_VkDevice.end()) {
1562 return VK_ERROR_INITIALIZATION_FAILED;
1563 }
1564
1565 auto semaphoreIt = info_VkSemaphore.find(pInfo->semaphore);
1566
1567 if (semaphoreIt == info_VkSemaphore.end()) {
1568 return VK_ERROR_INITIALIZATION_FAILED;
1569 }
1570
1571 auto& info = semaphoreIt->second;
1572
1573 if (info.eventHandle != ZX_HANDLE_INVALID) {
1574 zx_handle_close(info.eventHandle);
1575 }
1576 info.eventHandle = pInfo->handle;
1577
1578 return VK_SUCCESS;
1579 }
1580
on_vkGetSemaphoreZirconHandleFUCHSIA(void *,VkResult,VkDevice device,const VkSemaphoreGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)1581 VkResult on_vkGetSemaphoreZirconHandleFUCHSIA(
1582 void*, VkResult,
1583 VkDevice device,
1584 const VkSemaphoreGetZirconHandleInfoFUCHSIA* pInfo,
1585 uint32_t* pHandle) {
1586
1587 if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
1588 if (!pInfo->semaphore) return VK_ERROR_INITIALIZATION_FAILED;
1589
1590 AutoLock lock(mLock);
1591
1592 auto deviceIt = info_VkDevice.find(device);
1593
1594 if (deviceIt == info_VkDevice.end()) {
1595 return VK_ERROR_INITIALIZATION_FAILED;
1596 }
1597
1598 auto semaphoreIt = info_VkSemaphore.find(pInfo->semaphore);
1599
1600 if (semaphoreIt == info_VkSemaphore.end()) {
1601 return VK_ERROR_INITIALIZATION_FAILED;
1602 }
1603
1604 auto& info = semaphoreIt->second;
1605
1606 if (info.eventHandle == ZX_HANDLE_INVALID) {
1607 return VK_ERROR_INITIALIZATION_FAILED;
1608 }
1609
1610 *pHandle = ZX_HANDLE_INVALID;
1611 zx_handle_duplicate(info.eventHandle, ZX_RIGHT_SAME_RIGHTS, pHandle);
1612 return VK_SUCCESS;
1613 }
1614
on_vkCreateBufferCollectionFUCHSIA(void *,VkResult,VkDevice,const VkBufferCollectionCreateInfoFUCHSIA * pInfo,const VkAllocationCallbacks *,VkBufferCollectionFUCHSIA * pCollection)1615 VkResult on_vkCreateBufferCollectionFUCHSIA(
1616 void*, VkResult, VkDevice,
1617 const VkBufferCollectionCreateInfoFUCHSIA* pInfo,
1618 const VkAllocationCallbacks*,
1619 VkBufferCollectionFUCHSIA* pCollection) {
1620 fuchsia::sysmem::BufferCollectionTokenSyncPtr token;
1621 if (pInfo->collectionToken) {
1622 token.Bind(zx::channel(pInfo->collectionToken));
1623 } else {
1624 zx_status_t status = mSysmemAllocator->AllocateSharedCollection(token.NewRequest());
1625 if (status != ZX_OK) {
1626 ALOGE("AllocateSharedCollection failed: %d", status);
1627 return VK_ERROR_INITIALIZATION_FAILED;
1628 }
1629 }
1630 auto sysmem_collection = new fuchsia::sysmem::BufferCollectionSyncPtr;
1631 zx_status_t status = mSysmemAllocator->BindSharedCollection(
1632 std::move(token), sysmem_collection->NewRequest());
1633 if (status != ZX_OK) {
1634 ALOGE("BindSharedCollection failed: %d", status);
1635 return VK_ERROR_INITIALIZATION_FAILED;
1636 }
1637 *pCollection = reinterpret_cast<VkBufferCollectionFUCHSIA>(sysmem_collection);
1638 return VK_SUCCESS;
1639 }
1640
on_vkDestroyBufferCollectionFUCHSIA(void *,VkResult,VkDevice,VkBufferCollectionFUCHSIA collection,const VkAllocationCallbacks *)1641 void on_vkDestroyBufferCollectionFUCHSIA(
1642 void*, VkResult, VkDevice,
1643 VkBufferCollectionFUCHSIA collection,
1644 const VkAllocationCallbacks*) {
1645 auto sysmem_collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(collection);
1646 if (sysmem_collection->is_bound()) {
1647 (*sysmem_collection)->Close();
1648 }
1649 delete sysmem_collection;
1650 }
1651
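// Applies the sysmem constraints this driver can service: at least one
// BGRA32/sRGB image of at least the given extent, backed by the goldfish
// device-local heap, with only the inaccessible coherency domain
// supported (no CPU/RAM domain access).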
1652 void setBufferCollectionConstraints(fuchsia::sysmem::BufferCollectionSyncPtr* collection,
1653 const VkImageCreateInfo* pImageInfo,
1654 size_t min_size_bytes) {
1655 fuchsia::sysmem::BufferCollectionConstraints constraints = {};
1656 constraints.usage.vulkan = fuchsia::sysmem::vulkanUsageColorAttachment |
1657 fuchsia::sysmem::vulkanUsageTransferSrc |
1658 fuchsia::sysmem::vulkanUsageTransferDst |
1659 fuchsia::sysmem::vulkanUsageSampled;
1660 constraints.min_buffer_count = 1;
1661 constraints.has_buffer_memory_constraints = true;
1662 fuchsia::sysmem::BufferMemoryConstraints& buffer_constraints =
1663 constraints.buffer_memory_constraints;
1664 buffer_constraints.min_size_bytes = min_size_bytes;
1665 buffer_constraints.max_size_bytes = 0xffffffff;
1666 buffer_constraints.physically_contiguous_required = false;
1667 buffer_constraints.secure_required = false;
1668 buffer_constraints.ram_domain_supported = false;
1669 buffer_constraints.cpu_domain_supported = false;
1670 buffer_constraints.inaccessible_domain_supported = true;
1671 buffer_constraints.heap_permitted_count = 1;
1672 buffer_constraints.heap_permitted[0] =
1673 fuchsia::sysmem::HeapType::GOLDFISH_DEVICE_LOCAL;
1674 constraints.image_format_constraints_count = 1;
1675 fuchsia::sysmem::ImageFormatConstraints& image_constraints =
1676 constraints.image_format_constraints[0];
1677 image_constraints.pixel_format.type = fuchsia::sysmem::PixelFormatType::BGRA32;
1678 image_constraints.color_spaces_count = 1;
1679 image_constraints.color_space[0].type = fuchsia::sysmem::ColorSpaceType::SRGB;
1680 image_constraints.min_coded_width = pImageInfo->extent.width;
1681 image_constraints.max_coded_width = 0xffffffff;
1682 image_constraints.min_coded_height = pImageInfo->extent.height;
1683 image_constraints.max_coded_height = 0xffffffff;
1684 image_constraints.min_bytes_per_row = pImageInfo->extent.width * 4;
1685 image_constraints.max_bytes_per_row = 0xffffffff;
1686 image_constraints.max_coded_width_times_coded_height = 0xffffffff;
1687 image_constraints.layers = 1;
1688 image_constraints.coded_width_divisor = 1;
1689 image_constraints.coded_height_divisor = 1;
1690 image_constraints.bytes_per_row_divisor = 1;
1691 image_constraints.start_offset_divisor = 1;
1692 image_constraints.display_width_divisor = 1;
1693 image_constraints.display_height_divisor = 1;
1694
1695 (*collection)->SetConstraints(true, constraints);
1696 }
1697
1698 VkResult on_vkSetBufferCollectionConstraintsFUCHSIA(
1699 void*, VkResult, VkDevice,
1700 VkBufferCollectionFUCHSIA collection,
1701 const VkImageCreateInfo* pImageInfo) {
1702 auto sysmem_collection =
1703 reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(collection);
1704 setBufferCollectionConstraints(
1705 sysmem_collection, pImageInfo,
1706 pImageInfo->extent.width * pImageInfo->extent.height * 4);
1707 return VK_SUCCESS;
1708 }
1709
1710 VkResult on_vkGetBufferCollectionPropertiesFUCHSIA(
1711 void*, VkResult,
1712 VkDevice device,
1713 VkBufferCollectionFUCHSIA collection,
1714 VkBufferCollectionPropertiesFUCHSIA* pProperties) {
1715 auto sysmem_collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(collection);
1716 fuchsia::sysmem::BufferCollectionInfo_2 info;
1717 zx_status_t status2;
1718 zx_status_t status = (*sysmem_collection)->WaitForBuffersAllocated(&status2, &info);
1719 if (status != ZX_OK || status2 != ZX_OK) {
1720 ALOGE("Failed wait for allocation: %d %d", status, status2);
1721 return VK_ERROR_INITIALIZATION_FAILED;
1722 }
1723 if (!info.settings.has_image_format_constraints) {
1724 return VK_ERROR_INITIALIZATION_FAILED;
1725 }
1726 pProperties->count = info.buffer_count;
1727
1728 AutoLock lock(mLock);
1729
1730 auto deviceIt = info_VkDevice.find(device);
1731
1732 if (deviceIt == info_VkDevice.end()) {
1733 return VK_ERROR_INITIALIZATION_FAILED;
1734 }
1735
1736 auto& deviceInfo = deviceIt->second;
1737
1738 // Report every device-local memory type as supported.
1739 pProperties->memoryTypeBits = 0;
1740 for (uint32_t i = 0; i < deviceInfo.memProps.memoryTypeCount; ++i) {
1741 if (deviceInfo.memProps.memoryTypes[i].propertyFlags &
1742 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
1743 pProperties->memoryTypeBits |= 1ull << i;
1744 }
1745 }
1746 return VK_SUCCESS;
1747 }
1748 #endif
1749
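// Suballocation strategy for virtual host-visible memory: scan existing
// blocks for one that can service the request; if none fits, allocate a
// new host block (16 MB by default, or the rounded-up request size if
// larger), direct-map it into the guest, and rescan on the next loop
// iteration.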
1750 HostMemBlockIndex getOrAllocateHostMemBlockLocked(
1751 HostMemBlocks& blocks,
1752 const VkMemoryAllocateInfo* pAllocateInfo,
1753 VkEncoder* enc,
1754 VkDevice device,
1755 const VkDevice_Info& deviceInfo) {
1756
1757 HostMemBlockIndex res = 0;
1758 bool found = false;
1759
1760 while (!found) {
1761 for (HostMemBlockIndex i = 0; i < blocks.size(); ++i) {
1762 if (blocks[i].initialized &&
1763 blocks[i].initResult == VK_SUCCESS &&
1764 canSubAlloc(
1765 blocks[i].subAlloc,
1766 pAllocateInfo->allocationSize)) {
1767 res = i;
1768 found = true;
1769 return res;
1770 }
1771 }
1772
1773 blocks.push_back({});
1774
1775 auto& hostMemAlloc = blocks.back();
1776
1777 // Uninitialized block; allocate on host.
1778 static constexpr VkDeviceSize oneMb = 1048576;
1779 static constexpr VkDeviceSize kDefaultHostMemBlockSize =
1780 16 * oneMb; // 16 MB
1781 VkDeviceSize roundedUpAllocSize =
1782 oneMb * ((pAllocateInfo->allocationSize + oneMb - 1) / oneMb);
1783
1784 VkDeviceSize virtualHeapSize = VIRTUAL_HOST_VISIBLE_HEAP_SIZE;
1785
1786 VkDeviceSize blockSizeNeeded =
1787 std::max(roundedUpAllocSize,
1788 std::min(virtualHeapSize,
1789 kDefaultHostMemBlockSize));
1790
1791 VkMemoryAllocateInfo allocInfoForHost = *pAllocateInfo;
1792
1793 allocInfoForHost.allocationSize = blockSizeNeeded;
1794
1795 // TODO: Support dedicated/external host visible allocation
1796 allocInfoForHost.pNext = nullptr;
1797
1798 mLock.unlock();
1799 VkResult host_res =
1800 enc->vkAllocateMemory(
1801 device,
1802 &allocInfoForHost,
1803 nullptr,
1804 &hostMemAlloc.memory);
1805 mLock.lock();
1806
1807 if (host_res != VK_SUCCESS) {
1808 ALOGE("Could not allocate backing for virtual host visible memory: %d",
1809 host_res);
1810 hostMemAlloc.initialized = true;
1811 hostMemAlloc.initResult = host_res;
1812 return INVALID_HOST_MEM_BLOCK;
1813 }
1814
1815 auto& hostMemInfo = info_VkDeviceMemory[hostMemAlloc.memory];
1816 hostMemInfo.allocationSize = allocInfoForHost.allocationSize;
1817 VkDeviceSize nonCoherentAtomSize =
1818 deviceInfo.props.limits.nonCoherentAtomSize;
1819 hostMemInfo.mappedSize = hostMemInfo.allocationSize;
1820 hostMemInfo.memoryTypeIndex =
1821 pAllocateInfo->memoryTypeIndex;
1822 hostMemAlloc.nonCoherentAtomSize = nonCoherentAtomSize;
1823
1824 uint64_t directMappedAddr = 0;
1825
1827 VkResult directMapResult = VK_SUCCESS;
1828 if (mFeatureInfo->hasDirectMem) {
1829 mLock.unlock();
1830 directMapResult =
1831 enc->vkMapMemoryIntoAddressSpaceGOOGLE(
1832 device, hostMemAlloc.memory, &directMappedAddr);
1833 mLock.lock();
1834 } else if (mFeatureInfo->hasVirtioGpuNext) {
1835 #if !defined(HOST_BUILD) && defined(VK_USE_PLATFORM_ANDROID_KHR)
1836 uint64_t hvaSizeId[3];
1837
1838 mLock.unlock();
1839 enc->vkGetMemoryHostAddressInfoGOOGLE(
1840 device, hostMemAlloc.memory,
1841 &hvaSizeId[0], &hvaSizeId[1], &hvaSizeId[2]);
1842 ALOGD("%s: hvaOff, size: 0x%llx 0x%llx id: 0x%llx\n", __func__,
1843 (unsigned long long)hvaSizeId[0],
1844 (unsigned long long)hvaSizeId[1],
1845 (unsigned long long)hvaSizeId[2]);
1846 mLock.lock();
1847
1848 struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 };
1849 drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST;
1850 drm_rc_blob.blob_flags = VIRTGPU_BLOB_FLAG_MAPPABLE;
1851 drm_rc_blob.blob_id = hvaSizeId[2];
1852 drm_rc_blob.size = hvaSizeId[1];
1853
1854 int res = drmIoctl(
1855 mRendernodeFd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob);
1856
1857 if (res) {
1858 ALOGE("%s: Failed to create virtgpu blob resource: strerror: %s errno: %d\n", __func__,
1859 strerror(errno), errno);
1860 abort();
1861 }
1862
1863 struct drm_virtgpu_map map_info = {
1864 .handle = drm_rc_blob.bo_handle,
1865 };
1866
1867 res = drmIoctl(mRendernodeFd, DRM_IOCTL_VIRTGPU_MAP, &map_info);
1868 if (res) {
1869 ALOGE("%s: Failed to map virtgpu resource: strerror: %s errno: %d\n", __func__,
1870 strerror(errno), errno);
1871 abort();
1872 }
1873
1874 directMappedAddr = (uint64_t)(uintptr_t)
1875 mmap64(0, hvaSizeId[1], PROT_WRITE, MAP_SHARED, mRendernodeFd, map_info.offset);
1876
1877 if (directMappedAddr == (uint64_t)(uintptr_t)MAP_FAILED) {
1878 ALOGE("%s: mmap of virtio gpu resource failed\n", __func__);
1879 abort();
1880 }
1881
1882 // add the host's page offset
1883 directMappedAddr += (uint64_t)(uintptr_t)(hvaSizeId[0]) & (PAGE_SIZE - 1);
1884 directMapResult = VK_SUCCESS;
1885 #endif // !HOST_BUILD && VK_USE_PLATFORM_ANDROID_KHR
1886 }
1887
1888 if (directMapResult != VK_SUCCESS) {
1889 hostMemAlloc.initialized = true;
1890 hostMemAlloc.initResult = directMapResult;
1891 mLock.unlock();
1892 enc->vkFreeMemory(device, hostMemAlloc.memory, nullptr);
1893 mLock.lock();
1894 return INVALID_HOST_MEM_BLOCK;
1895 }
1896
1897 hostMemInfo.mappedPtr =
1898 (uint8_t*)(uintptr_t)directMappedAddr;
1899 hostMemInfo.virtualHostVisibleBacking = true;
1900
1901 VkResult hostMemAllocRes =
1902 finishHostMemAllocInit(
1903 enc,
1904 device,
1905 pAllocateInfo->memoryTypeIndex,
1906 nonCoherentAtomSize,
1907 hostMemInfo.allocationSize,
1908 hostMemInfo.mappedSize,
1909 hostMemInfo.mappedPtr,
1910 &hostMemAlloc);
1911
1912 if (hostMemAllocRes != VK_SUCCESS) {
1913 return INVALID_HOST_MEM_BLOCK;
1914 }
1915 }
1916
1917 // Unreachable, but needed to keep -Werror (missing return) happy.
1918 return INVALID_HOST_MEM_BLOCK;
1919 }
1920
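// Allocation decision tree, as implemented below:
// 1. Export/import of AHardwareBuffers and Fuchsia VMOs/buffer collections
//    is resolved first and rewritten as a host color-buffer import.
// 2. Non-host-visible (device-local) requests go straight to the host.
// 3. Host-visible requests either get a guest shadow buffer (when direct
//    mapping is unavailable) or are suballocated from a direct-mapped
//    host memory block.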
1921 VkResult on_vkAllocateMemory(
1922 void* context,
1923 VkResult input_result,
1924 VkDevice device,
1925 const VkMemoryAllocateInfo* pAllocateInfo,
1926 const VkAllocationCallbacks* pAllocator,
1927 VkDeviceMemory* pMemory) {
1928
1929 if (input_result != VK_SUCCESS) return input_result;
1930
1931 VkEncoder* enc = (VkEncoder*)context;
1932
1933 VkMemoryAllocateInfo finalAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
1934 vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&finalAllocInfo);
1935
1936 VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
1937 VkImportColorBufferGOOGLE importCbInfo = {
1938 VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE, 0,
1939 };
1940 // VkImportPhysicalAddressGOOGLE importPhysAddrInfo = {
1941 // VK_STRUCTURE_TYPE_IMPORT_PHYSICAL_ADDRESS_GOOGLE, 0,
1942 // };
1943
1944 const VkExportMemoryAllocateInfo* exportAllocateInfoPtr =
1945 vk_find_struct<VkExportMemoryAllocateInfo>(pAllocateInfo);
1946
1947 const VkImportAndroidHardwareBufferInfoANDROID* importAhbInfoPtr =
1948 vk_find_struct<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo);
1949
1950 const VkImportMemoryBufferCollectionFUCHSIA* importBufferCollectionInfoPtr =
1951 vk_find_struct<VkImportMemoryBufferCollectionFUCHSIA>(pAllocateInfo);
1952
1953 const VkImportMemoryZirconHandleInfoFUCHSIA* importVmoInfoPtr =
1954 vk_find_struct<VkImportMemoryZirconHandleInfoFUCHSIA>(pAllocateInfo);
1955
1956 const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
1957 vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
1958
1959 bool shouldPassThroughDedicatedAllocInfo =
1960 !exportAllocateInfoPtr &&
1961 !importAhbInfoPtr &&
1962 !importBufferCollectionInfoPtr &&
1963 !importVmoInfoPtr &&
1964 !isHostVisibleMemoryTypeIndexForGuest(
1965 &mHostVisibleMemoryVirtInfo,
1966 pAllocateInfo->memoryTypeIndex);
1967
1968 if (!exportAllocateInfoPtr &&
1969 (importAhbInfoPtr || importBufferCollectionInfoPtr || importVmoInfoPtr) &&
1970 dedicatedAllocInfoPtr &&
1971 isHostVisibleMemoryTypeIndexForGuest(
1972 &mHostVisibleMemoryVirtInfo,
1973 pAllocateInfo->memoryTypeIndex)) {
1974 ALOGE("FATAL: It is not yet supported to import-allocate "
1975 "external memory that is both host visible and dedicated.");
1976 abort();
1977 }
1978
1979 if (shouldPassThroughDedicatedAllocInfo &&
1980 dedicatedAllocInfoPtr) {
1981 dedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
1982 vk_append_struct(&structChainIter, &dedicatedAllocInfo);
1983 }
1984
1985 // State needed for import/export.
1986 bool exportAhb = false;
1987 bool exportVmo = false;
1988 bool importAhb = false;
1989 bool importBufferCollection = false;
1990 bool importVmo = false;
1991 (void)exportVmo;
1992
1993 // Even when we export-allocate, the underlying operation
1994 // on the host is always an import.
1995 // This is also how Intel's implementation works,
1996 // and it is generally simpler:
1997 // even for an export allocation,
1998 // we perform the AHardwareBuffer allocation
1999 // on the guest side, at this layer,
2000 // and then attach a new VkDeviceMemory
2001 // to that AHardwareBuffer on the host via an "import" operation.
2002 AHardwareBuffer* ahw = nullptr;
2003
2004 if (exportAllocateInfoPtr) {
2005 exportAhb =
2006 exportAllocateInfoPtr->handleTypes &
2007 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2008 exportVmo =
2009 exportAllocateInfoPtr->handleTypes &
2010 VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA;
2011 } else if (importAhbInfoPtr) {
2012 importAhb = true;
2013 } else if (importBufferCollectionInfoPtr) {
2014 importBufferCollection = true;
2015 } else if (importVmoInfoPtr) {
2016 importVmo = true;
2017 }
2018
2019 if (exportAhb) {
2020 bool hasDedicatedImage = dedicatedAllocInfoPtr &&
2021 (dedicatedAllocInfoPtr->image != VK_NULL_HANDLE);
2022 bool hasDedicatedBuffer = dedicatedAllocInfoPtr &&
2023 (dedicatedAllocInfoPtr->buffer != VK_NULL_HANDLE);
2024 VkExtent3D imageExtent = { 0, 0, 0 };
2025 uint32_t imageLayers = 0;
2026 VkFormat imageFormat = VK_FORMAT_UNDEFINED;
2027 VkImageUsageFlags imageUsage = 0;
2028 VkImageCreateFlags imageCreateFlags = 0;
2029 VkDeviceSize bufferSize = 0;
2030 VkDeviceSize allocationInfoAllocSize =
2031 finalAllocInfo.allocationSize;
2032
2033 if (hasDedicatedImage) {
2034 AutoLock lock(mLock);
2035
2036 auto it = info_VkImage.find(
2037 dedicatedAllocInfoPtr->image);
2038 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
2039 const auto& info = it->second;
2040 const auto& imgCi = info.createInfo;
2041
2042 imageExtent = imgCi.extent;
2043 imageLayers = imgCi.arrayLayers;
2044 imageFormat = imgCi.format;
2045 imageUsage = imgCi.usage;
2046 imageCreateFlags = imgCi.flags;
2047 }
2048
2049 if (hasDedicatedBuffer) {
2050 AutoLock lock(mLock);
2051
2052 auto it = info_VkBuffer.find(
2053 dedicatedAllocInfoPtr->buffer);
2054 if (it == info_VkBuffer.end()) return VK_ERROR_INITIALIZATION_FAILED;
2055 const auto& info = it->second;
2056 const auto& bufCi = info.createInfo;
2057
2058 bufferSize = bufCi.size;
2059 }
2060
2061 VkResult ahbCreateRes =
2062 createAndroidHardwareBuffer(
2063 hasDedicatedImage,
2064 hasDedicatedBuffer,
2065 imageExtent,
2066 imageLayers,
2067 imageFormat,
2068 imageUsage,
2069 imageCreateFlags,
2070 bufferSize,
2071 allocationInfoAllocSize,
2072 &ahw);
2073
2074 if (ahbCreateRes != VK_SUCCESS) {
2075 return ahbCreateRes;
2076 }
2077 }
2078
2079 if (importAhb) {
2080 ahw = importAhbInfoPtr->buffer;
2081 // We still need to acquire the AHardwareBuffer.
2082 importAndroidHardwareBuffer(
2083 mThreadingCallbacks.hostConnectionGetFunc()->grallocHelper(),
2084 importAhbInfoPtr, nullptr);
2085 }
2086
2087 if (ahw) {
2088 ALOGD("%s: Import AHardwareBuffer", __func__);
2089 importCbInfo.colorBuffer =
2090 mThreadingCallbacks.hostConnectionGetFunc()->grallocHelper()->
2091 getHostHandle(AHardwareBuffer_getNativeHandle(ahw));
2092 vk_append_struct(&structChainIter, &importCbInfo);
2093 }
2094
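// On Fuchsia, both import paths below (buffer collection and VMO) funnel
// into vmo_handle, which is later resolved to a host color buffer.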
2095 zx_handle_t vmo_handle = ZX_HANDLE_INVALID;
2096
2097 if (importBufferCollection) {
2098
2099 #ifdef VK_USE_PLATFORM_FUCHSIA
2100 auto collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(
2101 importBufferCollectionInfoPtr->collection);
2102 fuchsia::sysmem::BufferCollectionInfo_2 info;
2103 zx_status_t status2;
2104 zx_status_t status = (*collection)->WaitForBuffersAllocated(&status2, &info);
2105 if (status != ZX_OK || status2 != ZX_OK) {
2106 ALOGE("WaitForBuffersAllocated failed: %d %d", status, status2);
2107 return VK_ERROR_INITIALIZATION_FAILED;
2108 }
2109 uint32_t index = importBufferCollectionInfoPtr->index;
2110 if (index >= info.buffer_count) {
2111 ALOGE("Invalid buffer index: %u, buffer count: %u", index, info.buffer_count);
2112 return VK_ERROR_INITIALIZATION_FAILED;
2113 }
2114 vmo_handle = info.buffers[index].vmo.release();
2115 #endif
2116
2117 }
2118
2119 if (importVmo) {
2120 vmo_handle = importVmoInfoPtr->handle;
2121 }
2122
2123 #ifdef VK_USE_PLATFORM_FUCHSIA
2124 if (exportVmo) {
2125 bool hasDedicatedImage = dedicatedAllocInfoPtr &&
2126 (dedicatedAllocInfoPtr->image != VK_NULL_HANDLE);
2127 VkImageCreateInfo imageCreateInfo = {};
2128
2129 if (hasDedicatedImage) {
2130 AutoLock lock(mLock);
2131
2132 auto it = info_VkImage.find(dedicatedAllocInfoPtr->image);
2133 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
2134 const auto& imageInfo = it->second;
2135
2136 imageCreateInfo = imageInfo.createInfo;
2137 }
2138
2139 if (imageCreateInfo.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
2140 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
2141 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
2142 VK_IMAGE_USAGE_SAMPLED_BIT)) {
2143 fuchsia::sysmem::BufferCollectionTokenSyncPtr token;
2144 zx_status_t status = mSysmemAllocator->AllocateSharedCollection(
2145 token.NewRequest());
2146 if (status != ZX_OK) {
2147 ALOGE("AllocateSharedCollection failed: %d", status);
2148 abort();
2149 }
2150
2151 fuchsia::sysmem::BufferCollectionSyncPtr collection;
2152 status = mSysmemAllocator->BindSharedCollection(
2153 std::move(token), collection.NewRequest());
2154 if (status != ZX_OK) {
2155 ALOGE("BindSharedCollection failed: %d", status);
2156 abort();
2157 }
2158 setBufferCollectionConstraints(&collection,
2159 &imageCreateInfo,
2160 finalAllocInfo.allocationSize);
2161
2162 fuchsia::sysmem::BufferCollectionInfo_2 info;
2163 zx_status_t status2;
2164 status = collection->WaitForBuffersAllocated(&status2, &info);
2165 if (status == ZX_OK && status2 == ZX_OK) {
2166 if (!info.buffer_count) {
2167 ALOGE("WaitForBuffersAllocated returned zero buffers");
2168 abort();
2169 }
2170 vmo_handle = info.buffers[0].vmo.release();
2171 } else {
2172 ALOGE("WaitForBuffersAllocated failed: %d %d", status, status2);
2173 abort();
2174 }
2175
2176 collection->Close();
2177
2178 zx::vmo vmo_copy;
2179 status = zx_handle_duplicate(vmo_handle,
2180 ZX_RIGHT_SAME_RIGHTS,
2181 vmo_copy.reset_and_get_address());
2182 if (status != ZX_OK) {
2183 ALOGE("Failed to duplicate VMO: %d", status);
2184 abort();
2185 }
2186 // TODO(reveman): Use imageCreateInfo.format to determine color
2187 // buffer format.
2188 status = mControlDevice->CreateColorBuffer(
2189 std::move(vmo_copy),
2190 imageCreateInfo.extent.width,
2191 imageCreateInfo.extent.height,
2192 fuchsia::hardware::goldfish::ColorBufferFormatType::BGRA,
2193 &status2);
2194 if (status != ZX_OK || status2 != ZX_OK) {
2195 ALOGE("CreateColorBuffer failed: %d:%d", status, status2);
2196 abort();
2197 }
2198 }
2199 }
2200
2201 if (vmo_handle != ZX_HANDLE_INVALID) {
2202 zx::vmo vmo_copy;
2203 zx_status_t status = zx_handle_duplicate(vmo_handle,
2204 ZX_RIGHT_SAME_RIGHTS,
2205 vmo_copy.reset_and_get_address());
2206 if (status != ZX_OK) {
2207 ALOGE("Failed to duplicate VMO: %d", status);
2208 abort();
2209 }
2210 zx_status_t status2 = ZX_OK;
2211 status = mControlDevice->GetColorBuffer(
2212 std::move(vmo_copy), &status2, &importCbInfo.colorBuffer);
2213 if (status != ZX_OK || status2 != ZX_OK) {
2214 ALOGE("GetColorBuffer failed: %d:%d", status, status2);
2215 }
2216 vk_append_struct(&structChainIter, &importCbInfo);
2217 }
2218 #endif
2219
2220 if (!isHostVisibleMemoryTypeIndexForGuest(
2221 &mHostVisibleMemoryVirtInfo,
2222 finalAllocInfo.memoryTypeIndex)) {
2223 input_result =
2224 enc->vkAllocateMemory(
2225 device, &finalAllocInfo, pAllocator, pMemory);
2226
2227 if (input_result != VK_SUCCESS) return input_result;
2228
2230 setDeviceMemoryInfo(
2231 device, *pMemory,
2232 finalAllocInfo.allocationSize,
2233 0, nullptr,
2234 finalAllocInfo.memoryTypeIndex,
2235 ahw,
2236 vmo_handle);
2237
2238 return VK_SUCCESS;
2239 }
2240
2241 // Device-local memory handling ends here.
2242 // Everything that follows deals with host-visible memory.
2243
2244 if (ahw) {
2245 ALOGE("%s: Host visible export/import allocation "
2246 "of Android hardware buffers is not supported.",
2247 __func__);
2248 abort();
2249 }
2250
2251 if (vmo_handle != ZX_HANDLE_INVALID) {
2252 ALOGE("%s: Host visible export/import allocation "
2253 "of VMO is not supported yet.",
2254 __func__);
2255 abort();
2256 }
2257
2258 // Host-visible, non-external memory.
2259 bool directMappingSupported = usingDirectMapping();
2260 if (!directMappingSupported) {
2261 input_result =
2262 enc->vkAllocateMemory(
2263 device, &finalAllocInfo, pAllocator, pMemory);
2264
2265 if (input_result != VK_SUCCESS) return input_result;
2266
2267 VkDeviceSize mappedSize =
2268 getNonCoherentExtendedSize(device,
2269 finalAllocInfo.allocationSize);
2270 uint8_t* mappedPtr = (uint8_t*)aligned_buf_alloc(4096, mappedSize);
2271 D("host visible alloc (non-direct): "
2272 "size 0x%llx host ptr %p mapped size 0x%llx",
2273 (unsigned long long)finalAllocInfo.allocationSize, mappedPtr,
2274 (unsigned long long)mappedSize);
2275 setDeviceMemoryInfo(
2276 device, *pMemory,
2277 finalAllocInfo.allocationSize,
2278 mappedSize, mappedPtr,
2279 finalAllocInfo.memoryTypeIndex);
2280 return VK_SUCCESS;
2281 }
2282
2283 // Host visible memory with direct mapping via
2284 // VkImportPhysicalAddressGOOGLE
2285 // if (importPhysAddr) {
2286 // vkAllocateMemory(device, &finalAllocInfo, pAllocator, pMemory);
2287 // host maps the host pointer to the guest physical address
2288 // TODO: the host side page offset of the
2289 // host pointer needs to be returned somehow.
2290 // }
2291
2292 // Host visible memory with direct mapping
2293 AutoLock lock(mLock);
2294
2295 auto it = info_VkDevice.find(device);
2296 if (it == info_VkDevice.end()) return VK_ERROR_DEVICE_LOST;
2297 auto& deviceInfo = it->second;
2298
2299 auto& hostMemBlocksForTypeIndex =
2300 deviceInfo.hostMemBlocks[finalAllocInfo.memoryTypeIndex];
2301
2302 HostMemBlockIndex blockIndex =
2303 getOrAllocateHostMemBlockLocked(
2304 hostMemBlocksForTypeIndex,
2305 &finalAllocInfo,
2306 enc,
2307 device,
2308 deviceInfo);
2309
2310 if (blockIndex == (HostMemBlockIndex) INVALID_HOST_MEM_BLOCK) {
2311 return VK_ERROR_OUT_OF_HOST_MEMORY;
2312 }
2313
2314 VkDeviceMemory_Info virtualMemInfo;
2315
2316 subAllocHostMemory(
2317 &hostMemBlocksForTypeIndex[blockIndex],
2318 &finalAllocInfo,
2319 &virtualMemInfo.subAlloc);
2320
2321 virtualMemInfo.allocationSize = virtualMemInfo.subAlloc.subAllocSize;
2322 virtualMemInfo.mappedSize = virtualMemInfo.subAlloc.subMappedSize;
2323 virtualMemInfo.mappedPtr = virtualMemInfo.subAlloc.mappedPtr;
2324 virtualMemInfo.memoryTypeIndex = finalAllocInfo.memoryTypeIndex;
2325 virtualMemInfo.directMapped = true;
2326
2327 D("host visible alloc (direct, suballoc): "
2328 "size 0x%llx ptr %p mapped size 0x%llx",
2329 (unsigned long long)virtualMemInfo.allocationSize, virtualMemInfo.mappedPtr,
2330 (unsigned long long)virtualMemInfo.mappedSize);
2331
2332 info_VkDeviceMemory[
2333 virtualMemInfo.subAlloc.subMemory] = virtualMemInfo;
2334
2335 *pMemory = virtualMemInfo.subAlloc.subMemory;
2336
2337 return VK_SUCCESS;
2338 }
2339
2340 void on_vkFreeMemory(
2341 void* context,
2342 VkDevice device,
2343 VkDeviceMemory memory,
2344 const VkAllocationCallbacks* pAllocator) {
2345
2346 AutoLock lock(mLock);
2347
2348 auto it = info_VkDeviceMemory.find(memory);
2349 if (it == info_VkDeviceMemory.end()) return;
2350 auto& info = it->second;
2351
2352 if (!info.directMapped) {
2353 lock.unlock();
2354 VkEncoder* enc = (VkEncoder*)context;
2355 enc->vkFreeMemory(device, memory, pAllocator);
2356 return;
2357 }
2358
2359 subFreeHostMemory(&info.subAlloc);
2360 }
2361
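// Mapping never round-trips to the host: the memory is already mapped
// (directly into the host block, or via a guest shadow buffer), so this
// just bounds-checks the request and returns a pointer into that mapping.
//
// A minimal usage sketch from the application's point of view
// (illustrative only; names are standard Vulkan):
//
//   void* ptr = nullptr;
//   vkMapMemory(device, memory, /* offset */ 0, VK_WHOLE_SIZE, 0, &ptr);
//   memcpy(ptr, srcData, srcSize);  // writes land directly in the mapping
//   vkUnmapMemory(device, memory);  // a no-op in this implementation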
2362 VkResult on_vkMapMemory(
2363 void*,
2364 VkResult host_result,
2365 VkDevice,
2366 VkDeviceMemory memory,
2367 VkDeviceSize offset,
2368 VkDeviceSize size,
2369 VkMemoryMapFlags,
2370 void** ppData) {
2371
2372 if (host_result != VK_SUCCESS) return host_result;
2373
2374 AutoLock lock(mLock);
2375
2376 auto it = info_VkDeviceMemory.find(memory);
2377 if (it == info_VkDeviceMemory.end()) return VK_ERROR_MEMORY_MAP_FAILED;
2378
2379 auto& info = it->second;
2380
2381 if (!info.mappedPtr) return VK_ERROR_MEMORY_MAP_FAILED;
2382
2383 if (size != VK_WHOLE_SIZE &&
2384 (offset + size > info.allocationSize)) {
2385 return VK_ERROR_MEMORY_MAP_FAILED;
2386 }
2387
2388 *ppData = info.mappedPtr + offset;
2389
2390 return host_result;
2391 }
2392
2393 void on_vkUnmapMemory(
2394 void*,
2395 VkDevice,
2396 VkDeviceMemory) {
2397 // No-op: the memory stays mapped for its lifetime, so there is nothing to undo.
2398 }
2399
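// Currently an identity transform; kept as a single place to rewrite
// memory type bits for non-external resources if that ever becomes
// necessary.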
2400 uint32_t transformNonExternalResourceMemoryTypeBitsForGuest(
2401 uint32_t hostBits) {
2402 uint32_t res = 0;
2403 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
2404 if (hostBits & (1 << i)) {
2405 res |= (1 << i);
2406 }
2407 }
2408 return res;
2409 }
2410
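// External resources must not land in the virtualized host-visible
// memory types, so those bits are masked out of the host-reported set.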
2411 uint32_t transformExternalResourceMemoryTypeBitsForGuest(
2412 uint32_t normalBits) {
2413 uint32_t res = 0;
2414 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
2415 if (normalBits & (1 << i) &&
2416 !isHostVisibleMemoryTypeIndexForGuest(
2417 &mHostVisibleMemoryVirtInfo, i)) {
2418 res |= (1 << i);
2419 }
2420 }
2421 return res;
2422 }
2423
2424 void transformNonExternalResourceMemoryRequirementsForGuest(
2425 VkMemoryRequirements* reqs) {
2426 reqs->memoryTypeBits =
2427 transformNonExternalResourceMemoryTypeBitsForGuest(
2428 reqs->memoryTypeBits);
2429 }
2430
2431 void transformExternalResourceMemoryRequirementsForGuest(
2432 VkMemoryRequirements* reqs) {
2433 reqs->memoryTypeBits =
2434 transformExternalResourceMemoryTypeBitsForGuest(
2435 reqs->memoryTypeBits);
2436 }
2437
2438 void transformExternalResourceMemoryDedicatedRequirementsForGuest(
2439 VkMemoryDedicatedRequirements* dedicatedReqs) {
2440 dedicatedReqs->prefersDedicatedAllocation = VK_TRUE;
2441 dedicatedReqs->requiresDedicatedAllocation = VK_TRUE;
2442 }
2443
2444 void transformImageMemoryRequirementsForGuestLocked(
2445 VkImage image,
2446 VkMemoryRequirements* reqs) {
2447
2448 auto it = info_VkImage.find(image);
2449 if (it == info_VkImage.end()) return;
2450
2451 auto& info = it->second;
2452
2453 if (!info.external ||
2454 !info.externalCreateInfo.handleTypes) {
2455 transformNonExternalResourceMemoryRequirementsForGuest(reqs);
2456 return;
2457 }
2458
2459 transformExternalResourceMemoryRequirementsForGuest(reqs);
2460
2461 setMemoryRequirementsForSysmemBackedImage(image, reqs);
2462 }
2463
2464 void transformBufferMemoryRequirementsForGuestLocked(
2465 VkBuffer buffer,
2466 VkMemoryRequirements* reqs) {
2467
2468 auto it = info_VkBuffer.find(buffer);
2469 if (it == info_VkBuffer.end()) return;
2470
2471 auto& info = it->second;
2472
2473 if (!info.external ||
2474 !info.externalCreateInfo.handleTypes) {
2475 transformNonExternalResourceMemoryRequirementsForGuest(reqs);
2476 return;
2477 }
2478
2479 transformExternalResourceMemoryRequirementsForGuest(reqs);
2480 }
2481
2482 void transformImageMemoryRequirements2ForGuest(
2483 VkImage image,
2484 VkMemoryRequirements2* reqs2) {
2485
2486 AutoLock lock(mLock);
2487
2488 auto it = info_VkImage.find(image);
2489 if (it == info_VkImage.end()) return;
2490
2491 auto& info = it->second;
2492
2493 if (!info.external ||
2494 !info.externalCreateInfo.handleTypes) {
2495 transformNonExternalResourceMemoryRequirementsForGuest(
2496 &reqs2->memoryRequirements);
2497 return;
2498 }
2499
2500 transformExternalResourceMemoryRequirementsForGuest(&reqs2->memoryRequirements);
2501
2502 setMemoryRequirementsForSysmemBackedImage(image, &reqs2->memoryRequirements);
2503
2504 VkMemoryDedicatedRequirements* dedicatedReqs =
2505 vk_find_struct<VkMemoryDedicatedRequirements>(reqs2);
2506
2507 if (!dedicatedReqs) return;
2508
2509 transformExternalResourceMemoryDedicatedRequirementsForGuest(
2510 dedicatedReqs);
2511 }
2512
2513 void transformBufferMemoryRequirements2ForGuest(
2514 VkBuffer buffer,
2515 VkMemoryRequirements2* reqs2) {
2516
2517 AutoLock lock(mLock);
2518
2519 auto it = info_VkBuffer.find(buffer);
2520 if (it == info_VkBuffer.end()) return;
2521
2522 auto& info = it->second;
2523
2524 if (!info.external ||
2525 !info.externalCreateInfo.handleTypes) {
2526 transformNonExternalResourceMemoryRequirementsForGuest(
2527 &reqs2->memoryRequirements);
2528 return;
2529 }
2530
2531 transformExternalResourceMemoryRequirementsForGuest(&reqs2->memoryRequirements);
2532
2533 VkMemoryDedicatedRequirements* dedicatedReqs =
2534 vk_find_struct<VkMemoryDedicatedRequirements>(reqs2);
2535
2536 if (!dedicatedReqs) return;
2537
2538 transformExternalResourceMemoryDedicatedRequirementsForGuest(
2539 dedicatedReqs);
2540 }
2541
2542 VkResult on_vkCreateImage(
2543 void* context, VkResult,
2544 VkDevice device, const VkImageCreateInfo *pCreateInfo,
2545 const VkAllocationCallbacks *pAllocator,
2546 VkImage *pImage) {
2547 VkEncoder* enc = (VkEncoder*)context;
2548
2549 VkImageCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2550 vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
2551 VkExternalMemoryImageCreateInfo localExtImgCi;
2552
2553 const VkExternalMemoryImageCreateInfo* extImgCiPtr =
2554 vk_find_struct<VkExternalMemoryImageCreateInfo>(pCreateInfo);
2555 if (extImgCiPtr) {
2556 localExtImgCi = vk_make_orphan_copy(*extImgCiPtr);
2557 vk_append_struct(&structChainIter, &localExtImgCi);
2558 }
2559
2560 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2561 VkNativeBufferANDROID localAnb;
2562 const VkNativeBufferANDROID* anbInfoPtr =
2563 vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
2564 if (anbInfoPtr) {
2565 localAnb = vk_make_orphan_copy(*anbInfoPtr);
2566 vk_append_struct(&structChainIter, &localAnb);
2567 }
2568
2569 VkExternalFormatANDROID localExtFormatAndroid;
2570 const VkExternalFormatANDROID* extFormatAndroidPtr =
2571 vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
2572 if (extFormatAndroidPtr) {
2573 localExtFormatAndroid = vk_make_orphan_copy(*extFormatAndroidPtr);
2574
2575 // Do not append VkExternalFormatANDROID to the chain;
2576 // instead, replace the format in localCreateInfo
2577 // with the corresponding Vulkan format.
2578 if (extFormatAndroidPtr->externalFormat) {
2579 localCreateInfo.format =
2580 vk_format_from_android(extFormatAndroidPtr->externalFormat);
2581 if (localCreateInfo.format == VK_FORMAT_UNDEFINED)
2582 return VK_ERROR_VALIDATION_FAILED_EXT;
2583 }
2584 }
2585 #endif
2586
2587 #ifdef VK_USE_PLATFORM_FUCHSIA
2588 const VkBufferCollectionImageCreateInfoFUCHSIA* extBufferCollectionPtr =
2589 vk_find_struct<VkBufferCollectionImageCreateInfoFUCHSIA>(pCreateInfo);
2590 bool isSysmemBackedMemory = false;
2591 if (extBufferCollectionPtr) {
2592 auto collection = reinterpret_cast<fuchsia::sysmem::BufferCollectionSyncPtr*>(
2593 extBufferCollectionPtr->collection);
2594 uint32_t index = extBufferCollectionPtr->index;
2595 zx::vmo vmo;
2596
2597 fuchsia::sysmem::BufferCollectionInfo_2 info;
2598 zx_status_t status2;
2599 zx_status_t status = (*collection)->WaitForBuffersAllocated(&status2, &info);
2600 if (status == ZX_OK && status2 == ZX_OK) {
2601 if (index < info.buffer_count) {
2602 vmo = std::move(info.buffers[index].vmo);
2603 }
2604 } else {
2605 ALOGE("WaitForBuffersAllocated failed: %d %d", status, status2);
2606 }
2607
2608 if (vmo.is_valid()) {
2609 status2 = ZX_OK;
2610 status = mControlDevice->CreateColorBuffer(
2611 std::move(vmo),
2612 localCreateInfo.extent.width,
2613 localCreateInfo.extent.height,
2614 fuchsia::hardware::goldfish::ColorBufferFormatType::BGRA,
2615 &status2);
2616 if (status != ZX_OK || (status2 != ZX_OK && status2 != ZX_ERR_ALREADY_EXISTS)) {
2617 ALOGE("CreateColorBuffer failed: %d:%d", status, status2);
2618 }
2619 }
2620 isSysmemBackedMemory = true;
2621 }
2622 #endif
2623
2624 VkResult res;
2625 VkMemoryRequirements memReqs;
2626
2627 if (supportsCreateResourcesWithRequirements()) {
2628 res = enc->vkCreateImageWithRequirementsGOOGLE(device, &localCreateInfo, pAllocator, pImage, &memReqs);
2629 } else {
2630 res = enc->vkCreateImage(device, &localCreateInfo, pAllocator, pImage);
2631 }
2632
2633 if (res != VK_SUCCESS) return res;
2634
2635 AutoLock lock(mLock);
2636
2637 auto it = info_VkImage.find(*pImage);
2638 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
2639
2640 auto& info = it->second;
2641
2642 info.device = device;
2643 info.createInfo = *pCreateInfo;
2644 info.createInfo.pNext = nullptr;
2645
2646 if (supportsCreateResourcesWithRequirements()) {
2647 info.baseRequirementsKnown = true;
2648 }
2649
2650 if (extImgCiPtr) {
2651 info.external = true;
2652 info.externalCreateInfo = *extImgCiPtr;
2653 }
2654
2655 #ifdef VK_USE_PLATFORM_FUCHSIA
2656 if (isSysmemBackedMemory) {
2657 info.isSysmemBackedMemory = true;
2658 }
2659 #endif
2660
2661 if (info.baseRequirementsKnown) {
2662 transformImageMemoryRequirementsForGuestLocked(*pImage, &memReqs);
2663 info.baseRequirements = memReqs;
2664 }
2665 return res;
2666 }
2667
2668 VkResult on_vkCreateSamplerYcbcrConversion(
2669 void* context, VkResult,
2670 VkDevice device,
2671 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
2672 const VkAllocationCallbacks* pAllocator,
2673 VkSamplerYcbcrConversion* pYcbcrConversion) {
2674
2675 VkSamplerYcbcrConversionCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2676
2677 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2678 const VkExternalFormatANDROID* extFormatAndroidPtr =
2679 vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
2680 if (extFormatAndroidPtr) {
2681 if (extFormatAndroidPtr->externalFormat == AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM) {
2682 // The host does not support external formats, and passing RGB565
2683 // as an external format makes CtsGraphicsTestCases
2684 // android.graphics.cts.BasicVulkanGpuTest fail.
2685 // We may consider doing this for all external formats.
2686 // See b/134771579.
2687 *pYcbcrConversion = VK_YCBCR_CONVERSION_DO_NOTHING;
2688 return VK_SUCCESS;
2689 } else if (extFormatAndroidPtr->externalFormat) {
2690 localCreateInfo.format =
2691 vk_format_from_android(extFormatAndroidPtr->externalFormat);
2692 }
2693 }
2694 #endif
2695
2696 VkEncoder* enc = (VkEncoder*)context;
2697 VkResult res = enc->vkCreateSamplerYcbcrConversion(
2698 device, &localCreateInfo, pAllocator, pYcbcrConversion);
2699
2700 if (*pYcbcrConversion == VK_YCBCR_CONVERSION_DO_NOTHING) {
2701 ALOGE("FATAL: vkCreateSamplerYcbcrConversion returned a reserved value (VK_YCBCR_CONVERSION_DO_NOTHING)");
2702 abort();
2703 }
2704 return res;
2705 }
2706
2707 void on_vkDestroySamplerYcbcrConversion(
2708 void* context,
2709 VkDevice device,
2710 VkSamplerYcbcrConversion ycbcrConversion,
2711 const VkAllocationCallbacks* pAllocator) {
2712 VkEncoder* enc = (VkEncoder*)context;
2713 if (ycbcrConversion != VK_YCBCR_CONVERSION_DO_NOTHING) {
2714 enc->vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
2715 }
2716 }
2717
2718 VkResult on_vkCreateSamplerYcbcrConversionKHR(
2719 void* context, VkResult,
2720 VkDevice device,
2721 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
2722 const VkAllocationCallbacks* pAllocator,
2723 VkSamplerYcbcrConversion* pYcbcrConversion) {
2724
2725 VkSamplerYcbcrConversionCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2726
2727 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2728 const VkExternalFormatANDROID* extFormatAndroidPtr =
2729 vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
2730 if (extFormatAndroidPtr) {
2731 if (extFormatAndroidPtr->externalFormat == AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM) {
2732 // The host does not support external formats, and passing RGB565
2733 // as an external format makes CtsGraphicsTestCases
2734 // android.graphics.cts.BasicVulkanGpuTest fail.
2735 // We may consider doing this for all external formats.
2736 // See b/134771579.
2737 *pYcbcrConversion = VK_YCBCR_CONVERSION_DO_NOTHING;
2738 return VK_SUCCESS;
2739 } else if (extFormatAndroidPtr->externalFormat) {
2740 localCreateInfo.format =
2741 vk_format_from_android(extFormatAndroidPtr->externalFormat);
2742 }
2743 }
2744 #endif
2745
2746 VkEncoder* enc = (VkEncoder*)context;
2747 VkResult res = enc->vkCreateSamplerYcbcrConversionKHR(
2748 device, &localCreateInfo, pAllocator, pYcbcrConversion);
2749
2750 if (*pYcbcrConversion == VK_YCBCR_CONVERSION_DO_NOTHING) {
2751 ALOGE("FATAL: vkCreateSamplerYcbcrConversionKHR returned a reserved value (VK_YCBCR_CONVERSION_DO_NOTHING)");
2752 abort();
2753 }
2754 return res;
2755 }
2756
2757 void on_vkDestroySamplerYcbcrConversionKHR(
2758 void* context,
2759 VkDevice device,
2760 VkSamplerYcbcrConversion ycbcrConversion,
2761 const VkAllocationCallbacks* pAllocator) {
2762 VkEncoder* enc = (VkEncoder*)context;
2763 if (ycbcrConversion != VK_YCBCR_CONVERSION_DO_NOTHING) {
2764 enc->vkDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
2765 }
2766 }
2767
2768 VkResult on_vkCreateSampler(
2769 void* context, VkResult,
2770 VkDevice device,
2771 const VkSamplerCreateInfo* pCreateInfo,
2772 const VkAllocationCallbacks* pAllocator,
2773 VkSampler* pSampler) {
2774
2775 VkSamplerCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2776 vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
2777
2778 #if defined(VK_USE_PLATFORM_ANDROID_KHR) || defined(VK_USE_PLATFORM_FUCHSIA)
2779 VkSamplerYcbcrConversionInfo localVkSamplerYcbcrConversionInfo;
2780 const VkSamplerYcbcrConversionInfo* samplerYcbcrConversionInfo =
2781 vk_find_struct<VkSamplerYcbcrConversionInfo>(pCreateInfo);
2782 if (samplerYcbcrConversionInfo) {
2783 if (samplerYcbcrConversionInfo->conversion != VK_YCBCR_CONVERSION_DO_NOTHING) {
2784 localVkSamplerYcbcrConversionInfo = vk_make_orphan_copy(*samplerYcbcrConversionInfo);
2785 vk_append_struct(&structChainIter, &localVkSamplerYcbcrConversionInfo);
2786 }
2787 }
2788 #endif
2789
2790 VkEncoder* enc = (VkEncoder*)context;
2791 return enc->vkCreateSampler(device, &localCreateInfo, pAllocator, pSampler);
2792 }
2793
2794 void on_vkGetPhysicalDeviceExternalFenceProperties(
2795 void* context,
2796 VkPhysicalDevice physicalDevice,
2797 const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
2798 VkExternalFenceProperties* pExternalFenceProperties) {
2799
2800 (void)context;
2801 (void)physicalDevice;
2802
2803 pExternalFenceProperties->exportFromImportedHandleTypes = 0;
2804 pExternalFenceProperties->compatibleHandleTypes = 0;
2805 pExternalFenceProperties->externalFenceFeatures = 0;
2806
2807 bool syncFd =
2808 pExternalFenceInfo->handleType &
2809 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
2810
2811 if (!syncFd) {
2812 return;
2813 }
2814
2815 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2816 pExternalFenceProperties->exportFromImportedHandleTypes =
2817 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
2818 pExternalFenceProperties->compatibleHandleTypes =
2819 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
2820 pExternalFenceProperties->externalFenceFeatures =
2821 VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT |
2822 VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT;
2823
2824 ALOGD("%s: asked for sync fd, set the features\n", __func__);
2825 #endif
2826 }
2827
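// For exportable sync-fd fences, the export info is stripped from the
// pNext chain before the host call and recorded guest-side instead; the
// actual sync fd is created lazily in vkGetFenceFdKHR.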
2828 VkResult on_vkCreateFence(
2829 void* context,
2830 VkResult input_result,
2831 VkDevice device,
2832 const VkFenceCreateInfo* pCreateInfo,
2833 const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
2834
2835 VkEncoder* enc = (VkEncoder*)context;
2836 VkFenceCreateInfo finalCreateInfo = *pCreateInfo;
2837
2838 const VkExportFenceCreateInfo* exportFenceInfoPtr =
2839 vk_find_struct<VkExportFenceCreateInfo>(pCreateInfo);
2840
2841 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2842 bool exportSyncFd =
2843 exportFenceInfoPtr &&
2844 (exportFenceInfoPtr->handleTypes &
2845 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
2846
2847 if (exportSyncFd) {
2848 ALOGV("%s: exporting sync fd, do not send pNext to host\n", __func__);
2849 finalCreateInfo.pNext = nullptr;
2850 }
2851 #endif
2852
2853 input_result = enc->vkCreateFence(
2854 device, &finalCreateInfo, pAllocator, pFence);
2855
2856 if (input_result != VK_SUCCESS) return input_result;
2857
2858 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2859 if (exportSyncFd) {
2860 ALOGV("%s: ensure sync device\n", __func__);
2861 ensureSyncDeviceFd();
2862
2863 ALOGV("%s: getting fence info\n", __func__);
2864 AutoLock lock(mLock);
2865 auto it = info_VkFence.find(*pFence);
2866
2867 if (it == info_VkFence.end())
2868 return VK_ERROR_INITIALIZATION_FAILED;
2869
2870 auto& info = it->second;
2871
2872 info.external = true;
2873 info.exportFenceCreateInfo = *exportFenceInfoPtr;
2874 ALOGV("%s: info set (fence still -1). fence: %p\n", __func__, (void*)(*pFence));
2875 // syncFd is still -1 because we expect user to explicitly
2876 // export it via vkGetFenceFdKHR
2877 }
2878 #endif
2879
2880 return input_result;
2881 }
2882
2883 void on_vkDestroyFence(
2884 void* context,
2885 VkDevice device,
2886 VkFence fence,
2887 const VkAllocationCallbacks* pAllocator) {
2888 VkEncoder* enc = (VkEncoder*)context;
2889 enc->vkDestroyFence(device, fence, pAllocator);
2890 }
2891
2892 VkResult on_vkResetFences(
2893 void* context,
2894 VkResult,
2895 VkDevice device,
2896 uint32_t fenceCount,
2897 const VkFence* pFences) {
2898
2899 VkEncoder* enc = (VkEncoder*)context;
2900 VkResult res = enc->vkResetFences(device, fenceCount, pFences);
2901
2902 if (res != VK_SUCCESS) return res;
2903
2904 if (!fenceCount) return res;
2905
2906 // Permanence: temporary.
2907 // On fence reset, signal and close the current fence fd,
2908 // then act as if GetFenceFdKHR/ImportFenceFdKHR must be called again.
2909 AutoLock lock(mLock);
2910 for (uint32_t i = 0; i < fenceCount; ++i) {
2911 VkFence fence = pFences[i];
2912 auto it = info_VkFence.find(fence);
2913 if (it == info_VkFence.end()) continue;
auto& info = it->second;
2914 if (!info.external) continue;
2915
2916 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2917 if (info.syncFd >= 0) {
2918 ALOGV("%s: resetting fence. make fd -1\n", __func__);
2919 goldfish_sync_signal(info.syncFd);
2920 close(info.syncFd);
2921 info.syncFd = -1;
2922 }
2923 #endif
2924 }
2925
2926 return res;
2927 }
2928
2929 VkResult on_vkImportFenceFdKHR(
2930 void* context,
2931 VkResult,
2932 VkDevice device,
2933 const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
2934
2935 (void)context;
2936 (void)device;
2937 (void)pImportFenceFdInfo;
2938
2939 // Transference: copy,
2940 // meaning we dup() the incoming fd rather than take ownership of it.
2941
2944 bool hasFence = pImportFenceFdInfo->fence != VK_NULL_HANDLE;
2945
2946 if (!hasFence) return VK_ERROR_OUT_OF_HOST_MEMORY;
2947
2948 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2949
2950 bool syncFdImport =
2951 pImportFenceFdInfo->handleType & VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
2952
2953 if (!syncFdImport) {
2954 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no sync fd import\n", __func__);
2955 return VK_ERROR_OUT_OF_HOST_MEMORY;
2956 }
2957
2958 AutoLock lock(mLock);
2959 auto it = info_VkFence.find(pImportFenceFdInfo->fence);
2960 if (it == info_VkFence.end()) {
2961 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no fence info\n", __func__);
2962 return VK_ERROR_OUT_OF_HOST_MEMORY;
2963 }
2964
2965 auto& info = it->second;
2966
2967 if (info.syncFd >= 0) {
2968 ALOGV("%s: previous sync fd exists, close it\n", __func__);
2969 goldfish_sync_signal(info.syncFd);
2970 close(info.syncFd);
2971 }
2972
2973 if (pImportFenceFdInfo->fd < 0) {
2974 ALOGV("%s: import -1, set to -1 and exit\n", __func__);
2975 info.syncFd = -1;
2976 } else {
2977 ALOGV("%s: import actual fd, dup and close()\n", __func__);
2978 info.syncFd = dup(pImportFenceFdInfo->fd);
2979 close(pImportFenceFdInfo->fd);
2980 }
2981 return VK_SUCCESS;
2982 #else
2983 return VK_ERROR_OUT_OF_HOST_MEMORY;
2984 #endif
2985 }
2986
2987 VkResult on_vkGetFenceFdKHR(
2988 void* context,
2989 VkResult,
2990 VkDevice device,
2991 const VkFenceGetFdInfoKHR* pGetFdInfo,
2992 int* pFd) {
2993
2994 // Export operation:
2995 // if the fence is already signaled, return -1;
2996 // otherwise, queue work on the goldfish sync device to produce
2997 // a sync fd that will signal when the fence does.
2998
2999 VkEncoder* enc = (VkEncoder*)context;
3000
3001 bool hasFence = pGetFdInfo->fence != VK_NULL_HANDLE;
3002
3003 if (!hasFence) {
3004 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no fence\n", __func__);
3005 return VK_ERROR_OUT_OF_HOST_MEMORY;
3006 }
3007
3008 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3009 bool syncFdExport =
3010 pGetFdInfo->handleType & VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
3011
3012 if (!syncFdExport) {
3013 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no sync fd fence\n", __func__);
3014 return VK_ERROR_OUT_OF_HOST_MEMORY;
3015 }
3016
3017 VkResult currentFenceStatus = enc->vkGetFenceStatus(device, pGetFdInfo->fence);
3018
3019 if (VK_SUCCESS == currentFenceStatus) { // Fence already signaled
3020 ALOGV("%s: VK_SUCCESS: already signaled\n", __func__);
3021 *pFd = -1;
3022 return VK_SUCCESS;
3023 }
3024
3025 if (VK_ERROR_DEVICE_LOST == currentFenceStatus) { // Other error
3026 ALOGV("%s: VK_ERROR_DEVICE_LOST: Other error\n", __func__);
3027 *pFd = -1;
3028 return VK_ERROR_DEVICE_LOST;
3029 }
3030
3031 if (VK_NOT_READY == currentFenceStatus) { // Fence unsignaled; create fd here
3032 AutoLock lock(mLock);
3033
3034 auto it = info_VkFence.find(pGetFdInfo->fence);
3035 if (it == info_VkFence.end()) {
3036 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no fence info\n", __func__);
3037 return VK_ERROR_OUT_OF_HOST_MEMORY;
3038 }
3039
3040 auto& info = it->second;
3041
3042 bool syncFdCreated =
3043 info.external &&
3044 (info.exportFenceCreateInfo.handleTypes &
3045 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
3046
3047 if (!syncFdCreated) {
3048 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no sync fd created\n", __func__);
3049 return VK_ERROR_OUT_OF_HOST_MEMORY;
3050 }
3051
3052 goldfish_sync_queue_work(
3053 mSyncDeviceFd,
3054 get_host_u64_VkFence(pGetFdInfo->fence) /* the handle */,
3055 GOLDFISH_SYNC_VULKAN_SEMAPHORE_SYNC /* thread handle (doubling as type field) */,
3056 pFd);
3057 // relinquish ownership
3058 info.syncFd = -1;
3059 ALOGV("%s: got fd: %d\n", __func__, *pFd);
3060 return VK_SUCCESS;
3061 }
3062 return VK_ERROR_DEVICE_LOST;
3063 #else
3064 return VK_ERROR_OUT_OF_HOST_MEMORY;
3065 #endif
3066 }
3067
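// Strategy: fences backed by a sync fd are waited on in the guest via the
// work pool, while ordinary fences are forwarded to the host driver; the
// two groups are then joined with waitAll/waitAny semantics.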
3068 VkResult on_vkWaitForFences(
3069 void* context,
3070 VkResult,
3071 VkDevice device,
3072 uint32_t fenceCount,
3073 const VkFence* pFences,
3074 VkBool32 waitAll,
3075 uint64_t timeout) {
3076
3077 VkEncoder* enc = (VkEncoder*)context;
3078
3079 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3080 std::vector<VkFence> fencesExternal;
3081 std::vector<int> fencesExternalWaitFds;
3082 std::vector<VkFence> fencesNonExternal;
3083
3084 AutoLock lock(mLock);
3085
3086 for (uint32_t i = 0; i < fenceCount; ++i) {
3087 auto it = info_VkFence.find(pFences[i]);
3088 if (it == info_VkFence.end()) continue;
3089 const auto& info = it->second;
3090 if (info.syncFd >= 0) {
3091 fencesExternal.push_back(pFences[i]);
3092 fencesExternalWaitFds.push_back(info.syncFd);
3093 } else {
3094 fencesNonExternal.push_back(pFences[i]);
3095 }
3096 }
3097
3098 if (fencesExternal.empty()) {
3099 // No need for work pool, just wait with host driver.
3100 return enc->vkWaitForFences(
3101 device, fenceCount, pFences, waitAll, timeout);
3102 } else {
3103 // Depending on wait any or wait all,
3104 // schedule a wait group with waitAny/waitAll
3105 std::vector<WorkPool::Task> tasks;
3106
3107 ALOGV("%s: scheduling ext waits\n", __func__);
3108
3109 for (auto fd : fencesExternalWaitFds) {
3110 ALOGV("%s: wait on %d\n", __func__, fd);
3111 tasks.push_back([fd] {
3112 sync_wait(fd, 3000);
3113 ALOGV("done waiting on fd %d\n", fd);
3114 });
3115 }
3116
3117 if (!fencesNonExternal.empty()) {
3118 tasks.push_back([this,
3119 fencesNonExternal /* copy of vector */,
3120 device, waitAll, timeout] {
3121 auto hostConn = mThreadingCallbacks.hostConnectionGetFunc();
3122 auto vkEncoder = mThreadingCallbacks.vkEncoderGetFunc(hostConn);
3123 ALOGV("%s: vkWaitForFences to host\n", __func__);
3124 vkEncoder->vkWaitForFences(device, (uint32_t)fencesNonExternal.size(), fencesNonExternal.data(), waitAll, timeout);
3125 });
3126 }
3127
3128 auto waitGroupHandle = mWorkPool.schedule(tasks);
3129
3130 // Convert timeout to microseconds from nanoseconds
3131 bool waitRes = false;
3132 if (waitAll) {
3133 waitRes = mWorkPool.waitAll(waitGroupHandle, timeout / 1000);
3134 } else {
3135 waitRes = mWorkPool.waitAny(waitGroupHandle, timeout / 1000);
3136 }
3137
3138 if (waitRes) {
3139 ALOGV("%s: VK_SUCCESS\n", __func__);
3140 return VK_SUCCESS;
3141 } else {
3142 ALOGV("%s: VK_TIMEOUT\n", __func__);
3143 return VK_TIMEOUT;
3144 }
3145 }
3146 #else
3147 return enc->vkWaitForFences(
3148 device, fenceCount, pFences, waitAll, timeout);
3149 #endif
3150 }
3151
3152 VkResult on_vkCreateDescriptorPool(
3153 void* context,
3154 VkResult,
3155 VkDevice device,
3156 const VkDescriptorPoolCreateInfo* pCreateInfo,
3157 const VkAllocationCallbacks* pAllocator,
3158 VkDescriptorPool* pDescriptorPool) {
3159
3160 VkEncoder* enc = (VkEncoder*)context;
3161
3162 VkResult res = enc->vkCreateDescriptorPool(
3163 device, pCreateInfo, pAllocator, pDescriptorPool);
3164
3165 if (res != VK_SUCCESS) return res;
3166
3167 AutoLock lock(mLock);
3168 auto it = info_VkDescriptorPool.find(*pDescriptorPool);
3169 if (it == info_VkDescriptorPool.end()) return res;
3170
3171 auto &info = it->second;
3172 info.createFlags = pCreateInfo->flags;
3173
3174 return res;
3175 }
3176
3177 void on_vkDestroyDescriptorPool(
3178 void* context,
3179 VkDevice device,
3180 VkDescriptorPool descriptorPool,
3181 const VkAllocationCallbacks* pAllocator) {
3182
3183 VkEncoder* enc = (VkEncoder*)context;
3184
3185 enc->vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
3186 }
3187
3188 VkResult on_vkResetDescriptorPool(
3189 void* context,
3190 VkResult,
3191 VkDevice device,
3192 VkDescriptorPool descriptorPool,
3193 VkDescriptorPoolResetFlags flags) {
3194
3195 VkEncoder* enc = (VkEncoder*)context;
3196
3197 VkResult res = enc->vkResetDescriptorPool(device, descriptorPool, flags);
3198
3199 if (res != VK_SUCCESS) return res;
3200
3201 AutoLock lock(mLock);
3202 clearDescriptorPoolLocked(descriptorPool);
3203 return res;
3204 }
3205
3206 VkResult on_vkAllocateDescriptorSets(
3207 void* context,
3208 VkResult,
3209 VkDevice device,
3210 const VkDescriptorSetAllocateInfo* pAllocateInfo,
3211 VkDescriptorSet* pDescriptorSets) {
3212
3213 VkEncoder* enc = (VkEncoder*)context;
3214
3215 VkResult res = enc->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3216
3217 if (res != VK_SUCCESS) return res;
3218
3219 AutoLock lock(mLock);
3220 initDescriptorSetStateLocked(pAllocateInfo, pDescriptorSets);
3221 return res;
3222 }
3223
3224 VkResult on_vkFreeDescriptorSets(
3225 void* context,
3226 VkResult,
3227 VkDevice device,
3228 VkDescriptorPool descriptorPool,
3229 uint32_t descriptorSetCount,
3230 const VkDescriptorSet* pDescriptorSets) {
3231
3232 VkEncoder* enc = (VkEncoder*)context;
3233
3234 // A bit of robustness so that we can tolerate double-frees of
3235 // descriptor sets and other invalid usage.
3236 // https://github.com/KhronosGroup/Vulkan-Docs/issues/1070
3237 // (people expect VK_SUCCESS to always be returned by vkFreeDescriptorSets)
3238 std::vector<VkDescriptorSet> toActuallyFree;
3239 {
3240 AutoLock lock(mLock);
3241
3242 if (!descriptorPoolSupportsIndividualFreeLocked(descriptorPool))
3243 return VK_SUCCESS;
3244
3245 for (uint32_t i = 0; i < descriptorSetCount; ++i) {
3246 if (descriptorSetReallyAllocedFromPoolLocked(
3247 pDescriptorSets[i], descriptorPool)) {
3248 toActuallyFree.push_back(pDescriptorSets[i]);
3249 }
3250 }
3251
3252 if (toActuallyFree.empty()) return VK_SUCCESS;
3253 }
3254
3255 return enc->vkFreeDescriptorSets(
3256 device, descriptorPool,
3257 (uint32_t)toActuallyFree.size(),
3258 toActuallyFree.data());
3259 }
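
    // Illustrative only: the filtering above matters for hypothetical app
    // cleanup paths like the following (names made up for this sketch),
    // where a set is freed twice or freed from a pool created without
    // VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
    //
    //     vkFreeDescriptorSets(device, pool, 1, &set); // VK_SUCCESS
    //     ...
    //     vkFreeDescriptorSets(device, pool, 1, &set); // stale handle;
    //                                                  // still VK_SUCCESS
    //
    // Only handles verified to belong to |descriptorPool| are forwarded to
    // the host.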

    VkResult on_vkCreateDescriptorSetLayout(
        void* context,
        VkResult,
        VkDevice device,
        const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
        const VkAllocationCallbacks* pAllocator,
        VkDescriptorSetLayout* pSetLayout) {

        VkEncoder* enc = (VkEncoder*)context;

        VkResult res = enc->vkCreateDescriptorSetLayout(
            device, pCreateInfo, pAllocator, pSetLayout);

        if (res != VK_SUCCESS) return res;

        AutoLock lock(mLock);

        auto it = info_VkDescriptorSetLayout.find(*pSetLayout);
        if (it == info_VkDescriptorSetLayout.end()) return res;

        auto& info = it->second;
        for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
            info.bindings.push_back(pCreateInfo->pBindings[i]);
        }

        return res;
    }

    void on_vkUpdateDescriptorSets(
        void* context,
        VkDevice device,
        uint32_t descriptorWriteCount,
        const VkWriteDescriptorSet* pDescriptorWrites,
        uint32_t descriptorCopyCount,
        const VkCopyDescriptorSet* pDescriptorCopies) {

        VkEncoder* enc = (VkEncoder*)context;

        std::vector<std::vector<VkDescriptorImageInfo>> imageInfosPerWrite(
            descriptorWriteCount);

        std::vector<VkWriteDescriptorSet> writesWithSuppressedSamplers;

        {
            AutoLock lock(mLock);
            for (uint32_t i = 0; i < descriptorWriteCount; ++i) {
                writesWithSuppressedSamplers.push_back(
                    createImmutableSamplersFilteredWriteDescriptorSetLocked(
                        pDescriptorWrites + i,
                        imageInfosPerWrite.data() + i));
            }
        }

        enc->vkUpdateDescriptorSets(
            device, descriptorWriteCount, writesWithSuppressedSamplers.data(),
            descriptorCopyCount, pDescriptorCopies);
    }
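
    // Why the filtering step above: for bindings created with immutable
    // samplers, the spec says the sampler member of VkDescriptorImageInfo is
    // ignored, so apps may legally pass stale or null sampler handles there.
    // Stripping them guest-side (hence "suppressed samplers") keeps such
    // handles from ever reaching host-side decoding.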

    void on_vkDestroyImage(
        void* context,
        VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkDestroyImage(device, image, pAllocator);
    }

    void setMemoryRequirementsForSysmemBackedImage(
        VkImage image, VkMemoryRequirements* pMemoryRequirements) {
#ifdef VK_USE_PLATFORM_FUCHSIA
        auto it = info_VkImage.find(image);
        if (it == info_VkImage.end()) return;
        auto& info = it->second;
        if (info.isSysmemBackedMemory) {
            auto width = info.createInfo.extent.width;
            auto height = info.createInfo.extent.height;
            pMemoryRequirements->size = width * height * 4;
        }
#else
        // Bypass "unused parameter" checks.
        (void)image;
        (void)pMemoryRequirements;
#endif
    }
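
    // Note on the size computation above: it assumes a tightly packed,
    // 4-byte-per-pixel format (e.g. RGBA8888), so a hypothetical 640x480
    // sysmem-backed image would report 640 * 480 * 4 = 1228800 bytes. Other
    // formats or row-padded layouts would need a different calculation.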

    void on_vkGetImageMemoryRequirements(
        void *context, VkDevice device, VkImage image,
        VkMemoryRequirements *pMemoryRequirements) {

        AutoLock lock(mLock);

        auto it = info_VkImage.find(image);
        if (it == info_VkImage.end()) return;

        auto& info = it->second;

        if (info.baseRequirementsKnown) {
            *pMemoryRequirements = info.baseRequirements;
            return;
        }

        lock.unlock();

        VkEncoder* enc = (VkEncoder*)context;

        enc->vkGetImageMemoryRequirements(
            device, image, pMemoryRequirements);

        lock.lock();

        transformImageMemoryRequirementsForGuestLocked(
            image, pMemoryRequirements);

        info.baseRequirementsKnown = true;
        info.baseRequirements = *pMemoryRequirements;
    }

    void on_vkGetImageMemoryRequirements2(
        void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
        VkMemoryRequirements2 *pMemoryRequirements) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkGetImageMemoryRequirements2(
            device, pInfo, pMemoryRequirements);
        transformImageMemoryRequirements2ForGuest(
            pInfo->image, pMemoryRequirements);
    }

    void on_vkGetImageMemoryRequirements2KHR(
        void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
        VkMemoryRequirements2 *pMemoryRequirements) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkGetImageMemoryRequirements2KHR(
            device, pInfo, pMemoryRequirements);
        transformImageMemoryRequirements2ForGuest(
            pInfo->image, pMemoryRequirements);
    }

    VkResult on_vkBindImageMemory(
        void* context, VkResult,
        VkDevice device, VkImage image, VkDeviceMemory memory,
        VkDeviceSize memoryOffset) {
        VkEncoder* enc = (VkEncoder*)context;
        return enc->vkBindImageMemory(device, image, memory, memoryOffset);
    }

    VkResult on_vkBindImageMemory2(
        void* context, VkResult,
        VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
        VkEncoder* enc = (VkEncoder*)context;
        return enc->vkBindImageMemory2(device, bindingCount, pBindInfos);
    }

    VkResult on_vkBindImageMemory2KHR(
        void* context, VkResult,
        VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
        VkEncoder* enc = (VkEncoder*)context;
        return enc->vkBindImageMemory2KHR(device, bindingCount, pBindInfos);
    }

    VkResult on_vkCreateBuffer(
        void* context, VkResult,
        VkDevice device, const VkBufferCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkBuffer *pBuffer) {
        VkEncoder* enc = (VkEncoder*)context;

        VkResult res;
        VkMemoryRequirements memReqs;

        if (supportsCreateResourcesWithRequirements()) {
            res = enc->vkCreateBufferWithRequirementsGOOGLE(
                device, pCreateInfo, pAllocator, pBuffer, &memReqs);
        } else {
            res = enc->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
        }

        if (res != VK_SUCCESS) return res;

        AutoLock lock(mLock);

        auto it = info_VkBuffer.find(*pBuffer);
        if (it == info_VkBuffer.end()) return VK_ERROR_INITIALIZATION_FAILED;

        auto& info = it->second;

        info.createInfo = *pCreateInfo;
        info.createInfo.pNext = nullptr;

        if (supportsCreateResourcesWithRequirements()) {
            info.baseRequirementsKnown = true;
        }

        const VkExternalMemoryBufferCreateInfo* extBufCi =
            vk_find_struct<VkExternalMemoryBufferCreateInfo>(pCreateInfo);

        if (extBufCi) {
            info.external = true;
            info.externalCreateInfo = *extBufCi;
        }

        if (info.baseRequirementsKnown) {
            transformBufferMemoryRequirementsForGuestLocked(*pBuffer, &memReqs);
            info.baseRequirements = memReqs;
        }

        return res;
    }
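
    // Design note: vkCreateBufferWithRequirementsGOOGLE bundles creation and
    // vkGetBufferMemoryRequirements into one host call, so a later
    // on_vkGetBufferMemoryRequirements can usually be answered from the
    // cached |baseRequirements| without another guest<->host round trip.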

    void on_vkDestroyBuffer(
        void* context,
        VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkDestroyBuffer(device, buffer, pAllocator);
    }

    void on_vkGetBufferMemoryRequirements(
        void* context, VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {

        AutoLock lock(mLock);

        auto it = info_VkBuffer.find(buffer);
        if (it == info_VkBuffer.end()) return;

        auto& info = it->second;

        if (info.baseRequirementsKnown) {
            *pMemoryRequirements = info.baseRequirements;
            return;
        }

        lock.unlock();

        VkEncoder* enc = (VkEncoder*)context;
        enc->vkGetBufferMemoryRequirements(
            device, buffer, pMemoryRequirements);

        lock.lock();

        transformBufferMemoryRequirementsForGuestLocked(
            buffer, pMemoryRequirements);
        info.baseRequirementsKnown = true;
        info.baseRequirements = *pMemoryRequirements;
    }

    void on_vkGetBufferMemoryRequirements2(
        void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
        VkMemoryRequirements2* pMemoryRequirements) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
        transformBufferMemoryRequirements2ForGuest(
            pInfo->buffer, pMemoryRequirements);
    }

    void on_vkGetBufferMemoryRequirements2KHR(
        void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
        VkMemoryRequirements2* pMemoryRequirements) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
        transformBufferMemoryRequirements2ForGuest(
            pInfo->buffer, pMemoryRequirements);
    }

    VkResult on_vkBindBufferMemory(
        void *context, VkResult,
        VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
        VkEncoder *enc = (VkEncoder *)context;
        return enc->vkBindBufferMemory(
            device, buffer, memory, memoryOffset);
    }

    VkResult on_vkBindBufferMemory2(
        void *context, VkResult,
        VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
        VkEncoder *enc = (VkEncoder *)context;
        return enc->vkBindBufferMemory2(
            device, bindInfoCount, pBindInfos);
    }

    VkResult on_vkBindBufferMemory2KHR(
        void *context, VkResult,
        VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
        VkEncoder *enc = (VkEncoder *)context;
        return enc->vkBindBufferMemory2KHR(
            device, bindInfoCount, pBindInfos);
    }

    void ensureSyncDeviceFd() {
        if (mSyncDeviceFd >= 0) return;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        mSyncDeviceFd = goldfish_sync_open();
        if (mSyncDeviceFd >= 0) {
            ALOGD("%s: created sync device for current Vulkan process: %d\n", __func__, mSyncDeviceFd);
        } else {
            // Log failures at error severity; sync fd export cannot work
            // without the sync device.
            ALOGE("%s: failed to create sync device for current Vulkan process\n", __func__);
        }
#endif
    }

    VkResult on_vkCreateSemaphore(
        void* context, VkResult input_result,
        VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
        const VkAllocationCallbacks* pAllocator,
        VkSemaphore* pSemaphore) {

        VkEncoder* enc = (VkEncoder*)context;

        VkSemaphoreCreateInfo finalCreateInfo = *pCreateInfo;

        const VkExportSemaphoreCreateInfoKHR* exportSemaphoreInfoPtr =
            vk_find_struct<VkExportSemaphoreCreateInfoKHR>(pCreateInfo);

#ifdef VK_USE_PLATFORM_FUCHSIA
        bool exportEvent = exportSemaphoreInfoPtr &&
            (exportSemaphoreInfoPtr->handleTypes &
             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA);

        if (exportEvent) {
            finalCreateInfo.pNext = nullptr;
        }
#endif

#ifdef VK_USE_PLATFORM_ANDROID_KHR
        bool exportSyncFd = exportSemaphoreInfoPtr &&
            (exportSemaphoreInfoPtr->handleTypes &
             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT);

        if (exportSyncFd) {
            finalCreateInfo.pNext = nullptr;
        }
#endif
        input_result = enc->vkCreateSemaphore(
            device, &finalCreateInfo, pAllocator, pSemaphore);

        // Bail out early if host creation failed; *pSemaphore is not valid.
        if (input_result != VK_SUCCESS) return input_result;

        zx_handle_t event_handle = ZX_HANDLE_INVALID;

#ifdef VK_USE_PLATFORM_FUCHSIA
        if (exportEvent) {
            zx_event_create(0, &event_handle);
        }
#endif

        AutoLock lock(mLock);

        auto it = info_VkSemaphore.find(*pSemaphore);
        if (it == info_VkSemaphore.end()) return VK_ERROR_INITIALIZATION_FAILED;

        auto& info = it->second;

        info.device = device;
        info.eventHandle = event_handle;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (exportSyncFd) {
            ensureSyncDeviceFd();

            int syncFd = -1;
            goldfish_sync_queue_work(
                mSyncDeviceFd,
                get_host_u64_VkSemaphore(*pSemaphore) /* the handle */,
                GOLDFISH_SYNC_VULKAN_SEMAPHORE_SYNC /* thread handle (doubling as type field) */,
                &syncFd);
            info.syncFd = syncFd;
        }
#endif

        return VK_SUCCESS;
    }
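
    // For reference, a guest app opts into the sync fd path above with the
    // standard chained create (sketch; ordinary Vulkan usage, not specific
    // to this driver):
    //
    //     VkExportSemaphoreCreateInfo exportInfo = {
    //         VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, nullptr,
    //         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT };
    //     VkSemaphoreCreateInfo ci = {
    //         VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &exportInfo, 0 };
    //     vkCreateSemaphore(device, &ci, nullptr, &sem);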

    void on_vkDestroySemaphore(
        void* context,
        VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
        VkEncoder* enc = (VkEncoder*)context;
        enc->vkDestroySemaphore(device, semaphore, pAllocator);
    }

    // https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html#vkGetSemaphoreFdKHR
    // Each call to vkGetSemaphoreFdKHR must create a new file descriptor and transfer ownership
    // of it to the application. To avoid leaking resources, the application must release ownership
    // of the file descriptor when it is no longer needed.
    VkResult on_vkGetSemaphoreFdKHR(
        void* context, VkResult,
        VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
        int* pFd) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        VkEncoder* enc = (VkEncoder*)context;
        bool getSyncFd =
            pGetFdInfo->handleType & VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

        if (getSyncFd) {
            AutoLock lock(mLock);
            auto it = info_VkSemaphore.find(pGetFdInfo->semaphore);
            if (it == info_VkSemaphore.end()) return VK_ERROR_OUT_OF_HOST_MEMORY;
            auto& semInfo = it->second;
            // dup() so the returned fd is owned by the application, per spec.
            *pFd = dup(semInfo.syncFd);
            return VK_SUCCESS;
        } else {
            // opaque fd
            int hostFd = 0;
            VkResult result = enc->vkGetSemaphoreFdKHR(device, pGetFdInfo, &hostFd);
            if (result != VK_SUCCESS) {
                return result;
            }
            *pFd = memfd_create("vk_opaque_fd", 0);
            write(*pFd, &hostFd, sizeof(hostFd));
            return VK_SUCCESS;
        }
#else
        (void)context;
        (void)device;
        (void)pGetFdInfo;
        (void)pFd;
        return VK_ERROR_INCOMPATIBLE_DRIVER;
#endif
    }
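
    // How the opaque fd transport above works: the host-side fd cannot cross
    // the VM boundary, so the guest wraps the host fd *value* in a one-word
    // memfd. The matching import path below reads that value back out and
    // hands it to the host again. Sketch of the fd's contents:
    //
    //     memfd "vk_opaque_fd": [ int hostFd ]  // sizeof(int) bytes
    //
    // The memfd is therefore only meaningful to this driver, not to other
    // consumers of opaque fds.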

    VkResult on_vkImportSemaphoreFdKHR(
        void* context, VkResult input_result,
        VkDevice device,
        const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        VkEncoder* enc = (VkEncoder*)context;
        if (input_result != VK_SUCCESS) {
            return input_result;
        }

        if (pImportSemaphoreFdInfo->handleType &
            VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
            AutoLock lock(mLock);

            auto semaphoreIt = info_VkSemaphore.find(pImportSemaphoreFdInfo->semaphore);
            if (semaphoreIt == info_VkSemaphore.end()) {
                return VK_ERROR_INVALID_EXTERNAL_HANDLE;
            }
            auto& info = semaphoreIt->second;

            if (info.syncFd >= 0) {
                close(info.syncFd);
            }

            info.syncFd = pImportSemaphoreFdInfo->fd;

            return VK_SUCCESS;
        } else {
            int fd = pImportSemaphoreFdInfo->fd;
            int err = lseek(fd, 0, SEEK_SET);
            if (err == -1) {
                ALOGE("lseek fail on import semaphore");
            }
            int hostFd = 0;
            read(fd, &hostFd, sizeof(hostFd));
            VkImportSemaphoreFdInfoKHR tmpInfo = *pImportSemaphoreFdInfo;
            tmpInfo.fd = hostFd;
            VkResult result = enc->vkImportSemaphoreFdKHR(device, &tmpInfo);
            close(fd);
            return result;
        }
#else
        (void)context;
        (void)input_result;
        (void)device;
        (void)pImportSemaphoreFdInfo;
        return VK_ERROR_INCOMPATIBLE_DRIVER;
#endif
    }

    VkResult on_vkQueueSubmit(
        void* context, VkResult input_result,
        VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {

        std::vector<VkSemaphore> pre_signal_semaphores;
        std::vector<zx_handle_t> pre_signal_events;
        std::vector<int> pre_signal_sync_fds;
        std::vector<zx_handle_t> post_wait_events;
        std::vector<int> post_wait_sync_fds;

        VkEncoder* enc = (VkEncoder*)context;

        AutoLock lock(mLock);

        for (uint32_t i = 0; i < submitCount; ++i) {
            for (uint32_t j = 0; j < pSubmits[i].waitSemaphoreCount; ++j) {
                auto it = info_VkSemaphore.find(pSubmits[i].pWaitSemaphores[j]);
                if (it != info_VkSemaphore.end()) {
                    auto& semInfo = it->second;
#ifdef VK_USE_PLATFORM_FUCHSIA
                    if (semInfo.eventHandle) {
                        pre_signal_events.push_back(semInfo.eventHandle);
                        pre_signal_semaphores.push_back(pSubmits[i].pWaitSemaphores[j]);
                    }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
                    if (semInfo.syncFd >= 0) {
                        pre_signal_sync_fds.push_back(semInfo.syncFd);
                        pre_signal_semaphores.push_back(pSubmits[i].pWaitSemaphores[j]);
                    }
#endif
                }
            }
            for (uint32_t j = 0; j < pSubmits[i].signalSemaphoreCount; ++j) {
                auto it = info_VkSemaphore.find(pSubmits[i].pSignalSemaphores[j]);
                if (it != info_VkSemaphore.end()) {
                    auto& semInfo = it->second;
#ifdef VK_USE_PLATFORM_FUCHSIA
                    if (semInfo.eventHandle) {
                        post_wait_events.push_back(semInfo.eventHandle);
                    }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
                    if (semInfo.syncFd >= 0) {
                        post_wait_sync_fds.push_back(semInfo.syncFd);
                    }
#endif
                }
            }
        }
        lock.unlock();

        if (pre_signal_semaphores.empty()) {
            input_result = enc->vkQueueSubmit(queue, submitCount, pSubmits, fence);
            if (input_result != VK_SUCCESS) return input_result;
        } else {
            // Wait on the OS external objects in worker threads first, then
            // signal the corresponding wait semaphores with an extra host
            // submit, so the host never blocks on guest-side fds or events.
            std::vector<WorkPool::Task> preSignalTasks;
#ifdef VK_USE_PLATFORM_FUCHSIA
            for (auto event : pre_signal_events) {
                preSignalTasks.push_back([event] {
                    zx_object_wait_one(
                        event,
                        ZX_EVENT_SIGNALED,
                        ZX_TIME_INFINITE,
                        nullptr);
                });
            }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            for (auto fd : pre_signal_sync_fds) {
                preSignalTasks.push_back([fd] {
                    sync_wait(fd, 3000);
                });
            }
#endif
            auto waitGroupHandle = mWorkPool.schedule(preSignalTasks);
            mWorkPool.waitAll(waitGroupHandle);

            VkSubmitInfo submit_info = {
                .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                .waitSemaphoreCount = 0,
                .pWaitSemaphores = nullptr,
                .pWaitDstStageMask = nullptr,
                .signalSemaphoreCount = static_cast<uint32_t>(pre_signal_semaphores.size()),
                .pSignalSemaphores = pre_signal_semaphores.data()};
            enc->vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);

            input_result = enc->vkQueueSubmit(queue, submitCount, pSubmits, fence);
            if (input_result != VK_SUCCESS) return input_result;
        }

        lock.lock();
        int externalFenceFdToSignal = -1;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (fence != VK_NULL_HANDLE) {
            auto it = info_VkFence.find(fence);
            if (it != info_VkFence.end()) {
                const auto& info = it->second;
                if (info.syncFd >= 0) {
                    externalFenceFdToSignal = info.syncFd;
                }
            }
        }
#endif
        if (externalFenceFdToSignal >= 0 ||
            !post_wait_events.empty() ||
            !post_wait_sync_fds.empty()) {

            std::vector<WorkPool::Task> tasks;

            tasks.push_back([this, queue, externalFenceFdToSignal,
                             post_wait_events /* copy of zx handles */,
                             post_wait_sync_fds /* copy of sync fds */] {
                auto hostConn = mThreadingCallbacks.hostConnectionGetFunc();
                auto vkEncoder = mThreadingCallbacks.vkEncoderGetFunc(hostConn);
                auto waitIdleRes = vkEncoder->vkQueueWaitIdle(queue);
                (void)waitIdleRes;
#ifdef VK_USE_PLATFORM_FUCHSIA
                (void)externalFenceFdToSignal;
                for (auto& event : post_wait_events) {
                    zx_object_signal(event, 0, ZX_EVENT_SIGNALED);
                }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
                for (auto& fd : post_wait_sync_fds) {
                    goldfish_sync_signal(fd);
                }

                if (externalFenceFdToSignal >= 0) {
                    ALOGV("%s: external fence real signal: %d\n", __func__, externalFenceFdToSignal);
                    goldfish_sync_signal(externalFenceFdToSignal);
                }
#endif
            });
            auto queueAsyncWaitHandle = mWorkPool.schedule(tasks);
            auto& queueWorkItems = mQueueSensitiveWorkPoolItems[queue];
            queueWorkItems.push_back(queueAsyncWaitHandle);
        }

        return VK_SUCCESS;
    }
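
    // Rough timeline of the pre-signal path above (sketch):
    //
    //     guest worker threads           host queue
    //     --------------------------     ----------------------------------
    //     wait on sync fd / zx event --> submit #1: signal wait-semaphores
    //                                    submit #2: the app's original submits
    //
    // i.e. external waits are resolved guest-side first, then replayed as a
    // host-side semaphore signal; post-wait events/fds are signaled later by
    // the queued vkQueueWaitIdle task.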

    VkResult on_vkQueueWaitIdle(
        void* context, VkResult,
        VkQueue queue) {

        VkEncoder* enc = (VkEncoder*)context;

        AutoLock lock(mLock);
        std::vector<WorkPool::WaitGroupHandle> toWait =
            mQueueSensitiveWorkPoolItems[queue];
        mQueueSensitiveWorkPoolItems[queue].clear();
        lock.unlock();

        if (toWait.empty()) {
            ALOGV("%s: No queue-specific work pool items\n", __func__);
            return enc->vkQueueWaitIdle(queue);
        }

        for (auto handle : toWait) {
            ALOGV("%s: waiting on work group item: %llu\n", __func__,
                  (unsigned long long)handle);
            mWorkPool.waitAll(handle);
        }

        // Now that the queue-sensitive work items are done, get the host's opinion.
        return enc->vkQueueWaitIdle(queue);
    }

    void unwrap_VkNativeBufferANDROID(
        const VkImageCreateInfo* pCreateInfo,
        VkImageCreateInfo* local_pCreateInfo) {

        if (!pCreateInfo->pNext) return;

        const VkNativeBufferANDROID* nativeInfo =
            vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
        if (!nativeInfo) {
            return;
        }

        if (!nativeInfo->handle) return;

        VkNativeBufferANDROID* nativeInfoOut =
            reinterpret_cast<VkNativeBufferANDROID*>(
                const_cast<void*>(
                    local_pCreateInfo->pNext));

        if (!nativeInfoOut->handle) {
            ALOGE("FATAL: Local native buffer info not properly allocated!");
            abort();
        }

        *(uint32_t*)(nativeInfoOut->handle) =
            mThreadingCallbacks.hostConnectionGetFunc()->
                grallocHelper()->getHostHandle(
                    (const native_handle_t*)nativeInfo->handle);
    }

    void unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd, int*) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (fd != -1) {
            // Implicit synchronization
            sync_wait(fd, 3000);
            // From libvulkan's swapchain.cpp:
            // """
            // NOTE: we're relying on AcquireImageANDROID to close fence_clone,
            // even if the call fails. We could close it ourselves on failure, but
            // that would create a race condition if the driver closes it on a
            // failure path: some other thread might create an fd with the same
            // number between the time the driver closes it and the time we close
            // it. We must assume one of: the driver *always* closes it even on
            // failure, or *never* closes it on failure.
            // """
            // Therefore, assume the contract where we need to close the fd in
            // this driver.
            close(fd);
        }
#endif
    }

    // How vkMapMemoryIntoAddressSpaceGOOGLE works:
    // 1. Preprocess (on_vkMapMemoryIntoAddressSpaceGOOGLE_pre):
    //    use the address space device to reserve the right size of memory.
    // 2. The reservation yields a physical address, which is written to
    //    |*pAddress|.
    // 3. After the pre step, the API call is encoded to the host, where the
    //    value of pAddress (the physical address) is also sent.
    // 4. The host obtains the actual gpu pointer and sends it back out in
    //    |*pAddress|.
    // 5. Postprocess (on_vkMapMemoryIntoAddressSpaceGOOGLE) then runs, using
    //    the mmap() method of GoldfishAddressSpaceBlock to obtain a pointer
    //    in guest userspace corresponding to the host pointer.
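    //
    // Sketch of one round trip (all addresses made up for illustration):
    //   pre:  block.allocate(provider, mappedSize) reserves guest physical
    //         address 0x100000000; *pAddress = 0x100000000
    //   host: maps its memory there and replies with its gpu pointer in
    //         *pAddress
    //   post: block.mmap(*pAddress) returns a guest userspace pointer, which
    //         is what the caller finally sees in *pAddress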
    VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
        void*,
        VkResult,
        VkDevice,
        VkDeviceMemory memory,
        uint64_t* pAddress) {

        AutoLock lock(mLock);

        auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) {
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        auto& memInfo = it->second;
        memInfo.goldfishAddressSpaceBlock =
            new GoldfishAddressSpaceBlock;
        auto& block = *(memInfo.goldfishAddressSpaceBlock);

        block.allocate(
            mGoldfishAddressSpaceBlockProvider.get(),
            memInfo.mappedSize);

        *pAddress = block.physAddr();

        return VK_SUCCESS;
    }

    VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(
        void*,
        VkResult input_result,
        VkDevice,
        VkDeviceMemory memory,
        uint64_t* pAddress) {

        if (input_result != VK_SUCCESS) {
            return input_result;
        }

        // Now pAddress points to the gpu addr from the host.
        AutoLock lock(mLock);

        auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) {
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        auto& memInfo = it->second;
        auto& block = *(memInfo.goldfishAddressSpaceBlock);

        uint64_t gpuAddr = *pAddress;

        void* userPtr = block.mmap(gpuAddr);

        D("%s: Got new host visible alloc. "
          "Sizeof void: %zu map size: %zu Range: [%p %p]",
          __func__,
          sizeof(void*), (size_t)memInfo.mappedSize,
          userPtr,
          (unsigned char*)userPtr + memInfo.mappedSize);

        *pAddress = (uint64_t)(uintptr_t)userPtr;

        return input_result;
    }

    bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    }

    bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    }

    bool isDescriptorTypeBufferView(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    }

    VkResult initDescriptorUpdateTemplateBuffers(
        const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
        VkDescriptorUpdateTemplate descriptorUpdateTemplate) {

        AutoLock lock(mLock);

        auto it = info_VkDescriptorUpdateTemplate.find(descriptorUpdateTemplate);
        if (it == info_VkDescriptorUpdateTemplate.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = it->second;

        size_t imageInfosNeeded = 0;
        size_t bufferInfosNeeded = 0;
        size_t bufferViewsNeeded = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            uint32_t descCount = entry.descriptorCount;
            VkDescriptorType descType = entry.descriptorType;

            info.templateEntries.push_back(entry);

            for (uint32_t j = 0; j < descCount; ++j) {
                if (isDescriptorTypeImageInfo(descType)) {
                    ++imageInfosNeeded;
                    info.imageInfoEntryIndices.push_back(i);
                } else if (isDescriptorTypeBufferInfo(descType)) {
                    ++bufferInfosNeeded;
                    info.bufferInfoEntryIndices.push_back(i);
                } else if (isDescriptorTypeBufferView(descType)) {
                    ++bufferViewsNeeded;
                    info.bufferViewEntryIndices.push_back(i);
                } else {
                    ALOGE("%s: FATAL: Unknown descriptor type %d\n", __func__, descType);
                    abort();
                }
            }
        }

        // To be filled in later (our flat structure)
        info.imageInfos.resize(imageInfosNeeded);
        info.bufferInfos.resize(bufferInfosNeeded);
        info.bufferViews.resize(bufferViewsNeeded);

        return VK_SUCCESS;
    }
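
    // Example of the flattening above (hypothetical template): two entries,
    // entry 0 = 2x UNIFORM_BUFFER, entry 1 = 1x COMBINED_IMAGE_SAMPLER,
    // produce:
    //
    //     bufferInfos:            [ info, info ]   (2 slots)
    //     bufferInfoEntryIndices: [ 0, 0 ]
    //     imageInfos:             [ info ]         (1 slot)
    //     imageInfoEntryIndices:  [ 1 ]
    //
    // i.e. one flat slot per descriptor, tagged with its originating entry.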

    VkResult on_vkCreateDescriptorUpdateTemplate(
        void* context, VkResult input_result,
        VkDevice device,
        const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
        const VkAllocationCallbacks* pAllocator,
        VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {

        (void)context;
        (void)device;
        (void)pAllocator;

        if (input_result != VK_SUCCESS) return input_result;

        return initDescriptorUpdateTemplateBuffers(pCreateInfo, *pDescriptorUpdateTemplate);
    }

    VkResult on_vkCreateDescriptorUpdateTemplateKHR(
        void* context, VkResult input_result,
        VkDevice device,
        const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
        const VkAllocationCallbacks* pAllocator,
        VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {

        (void)context;
        (void)device;
        (void)pAllocator;

        if (input_result != VK_SUCCESS) return input_result;

        return initDescriptorUpdateTemplateBuffers(pCreateInfo, *pDescriptorUpdateTemplate);
    }

    void on_vkUpdateDescriptorSetWithTemplate(
        void* context,
        VkDevice device,
        VkDescriptorSet descriptorSet,
        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
        const void* pData) {

        VkEncoder* enc = (VkEncoder*)context;

        uint8_t* userBuffer = (uint8_t*)pData;
        if (!userBuffer) return;

        AutoLock lock(mLock);

        auto it = info_VkDescriptorUpdateTemplate.find(descriptorUpdateTemplate);
        if (it == info_VkDescriptorUpdateTemplate.end()) {
            return;
        }

        auto& info = it->second;

        size_t currImageInfoOffset = 0;
        size_t currBufferInfoOffset = 0;
        size_t currBufferViewOffset = 0;

        for (const auto& entry : info.templateEntries) {
            VkDescriptorType descType = entry.descriptorType;

            auto offset = entry.offset;
            auto stride = entry.stride;

            uint32_t descCount = entry.descriptorCount;

            if (isDescriptorTypeImageInfo(descType)) {
                // Treat a zero stride as tightly packed descriptor structs.
                if (!stride) stride = sizeof(VkDescriptorImageInfo);
                for (uint32_t j = 0; j < descCount; ++j) {
                    memcpy(((uint8_t*)info.imageInfos.data()) + currImageInfoOffset,
                           userBuffer + offset + j * stride,
                           sizeof(VkDescriptorImageInfo));
                    currImageInfoOffset += sizeof(VkDescriptorImageInfo);
                }
            } else if (isDescriptorTypeBufferInfo(descType)) {
                if (!stride) stride = sizeof(VkDescriptorBufferInfo);
                for (uint32_t j = 0; j < descCount; ++j) {
                    memcpy(((uint8_t*)info.bufferInfos.data()) + currBufferInfoOffset,
                           userBuffer + offset + j * stride,
                           sizeof(VkDescriptorBufferInfo));
                    currBufferInfoOffset += sizeof(VkDescriptorBufferInfo);
                }
            } else if (isDescriptorTypeBufferView(descType)) {
                if (!stride) stride = sizeof(VkBufferView);
                for (uint32_t j = 0; j < descCount; ++j) {
                    memcpy(((uint8_t*)info.bufferViews.data()) + currBufferViewOffset,
                           userBuffer + offset + j * stride,
                           sizeof(VkBufferView));
                    currBufferViewOffset += sizeof(VkBufferView);
                }
            } else {
                ALOGE("%s: FATAL: Unknown descriptor type %d\n", __func__, descType);
                abort();
            }
        }

        enc->vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
            device,
            descriptorSet,
            descriptorUpdateTemplate,
            (uint32_t)info.imageInfos.size(),
            (uint32_t)info.bufferInfos.size(),
            (uint32_t)info.bufferViews.size(),
            info.imageInfoEntryIndices.data(),
            info.bufferInfoEntryIndices.data(),
            info.bufferViewEntryIndices.data(),
            info.imageInfos.data(),
            info.bufferInfos.data(),
            info.bufferViews.data());
    }

    VkResult on_vkGetPhysicalDeviceImageFormatProperties2_common(
        bool isKhr,
        void* context, VkResult input_result,
        VkPhysicalDevice physicalDevice,
        const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
        VkImageFormatProperties2* pImageFormatProperties) {

        VkEncoder* enc = (VkEncoder*)context;
        (void)input_result;

        VkAndroidHardwareBufferUsageANDROID* output_ahw_usage =
            vk_find_struct<VkAndroidHardwareBufferUsageANDROID>(pImageFormatProperties);

        VkResult hostRes;

        if (isKhr) {
            hostRes = enc->vkGetPhysicalDeviceImageFormatProperties2KHR(
                physicalDevice, pImageFormatInfo,
                pImageFormatProperties);
        } else {
            hostRes = enc->vkGetPhysicalDeviceImageFormatProperties2(
                physicalDevice, pImageFormatInfo,
                pImageFormatProperties);
        }

        if (hostRes != VK_SUCCESS) return hostRes;

        if (output_ahw_usage) {
            output_ahw_usage->androidHardwareBufferUsage =
                getAndroidHardwareBufferUsageFromVkUsage(
                    pImageFormatInfo->flags,
                    pImageFormatInfo->usage);
        }

        return hostRes;
    }

    VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
        void* context, VkResult input_result,
        VkPhysicalDevice physicalDevice,
        const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
        VkImageFormatProperties2* pImageFormatProperties) {
        return on_vkGetPhysicalDeviceImageFormatProperties2_common(
            false /* not KHR */, context, input_result,
            physicalDevice, pImageFormatInfo, pImageFormatProperties);
    }

    VkResult on_vkGetPhysicalDeviceImageFormatProperties2KHR(
        void* context, VkResult input_result,
        VkPhysicalDevice physicalDevice,
        const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
        VkImageFormatProperties2* pImageFormatProperties) {
        return on_vkGetPhysicalDeviceImageFormatProperties2_common(
            true /* is KHR */, context, input_result,
            physicalDevice, pImageFormatInfo, pImageFormatProperties);
    }

    uint32_t syncEncodersForCommandBuffer(VkCommandBuffer commandBuffer, VkEncoder* currentEncoder) {
        AutoLock lock(mLock);

        auto it = info_VkCommandBuffer.find(commandBuffer);
        if (it == info_VkCommandBuffer.end()) return 0;

        auto& info = it->second;

        if (!info.lastUsedEncoderPtr) {
            info.lastUsedEncoderPtr = new VkEncoder*;
            *(info.lastUsedEncoderPtr) = currentEncoder;
        }

        auto lastUsedEncoderPtr = info.lastUsedEncoderPtr;

        auto lastEncoder = *(lastUsedEncoderPtr);

        // We always make lastUsedEncoderPtr track the current encoder,
        // even if the last encoder is null.
        *(lastUsedEncoderPtr) = currentEncoder;

        if (!lastEncoder) return 0;
        if (lastEncoder == currentEncoder) return 0;

        info.sequenceNumber++;
        lastEncoder->vkCommandBufferHostSyncGOOGLE(commandBuffer, false, info.sequenceNumber);
        lastEncoder->flush();
        info.sequenceNumber++;
        currentEncoder->vkCommandBufferHostSyncGOOGLE(commandBuffer, true, info.sequenceNumber);

        lastEncoder->unregisterCleanupCallback(commandBuffer);

        currentEncoder->registerCleanupCallback(commandBuffer, [currentEncoder, lastUsedEncoderPtr]() {
            if (*(lastUsedEncoderPtr) == currentEncoder) {
                *(lastUsedEncoderPtr) = nullptr;
            }
        });

        return 1;
    }
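
    // Sketch of the handoff above when a command buffer migrates between
    // encoders (i.e. between guest threads):
    //
    //     seq N   : lastEncoder    -> vkCommandBufferHostSyncGOOGLE(cb, false, N),
    //               then flush()
    //     seq N+1 : currentEncoder -> vkCommandBufferHostSyncGOOGLE(cb, true, N+1)
    //
    // The per-command-buffer sequence numbers let the host order the two
    // encoder streams so the command buffer's commands replay in the order
    // the guest issued them.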

    VkResult on_vkBeginCommandBuffer(
        void* context, VkResult input_result,
        VkCommandBuffer commandBuffer,
        const VkCommandBufferBeginInfo* pBeginInfo) {

        VkEncoder* enc = (VkEncoder*)context;
        (void)input_result;

        if (!supportsDeferredCommands()) {
            return enc->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
        }

        enc->vkBeginCommandBufferAsyncGOOGLE(commandBuffer, pBeginInfo);

        return VK_SUCCESS;
    }

    VkResult on_vkEndCommandBuffer(
        void* context, VkResult input_result,
        VkCommandBuffer commandBuffer) {

        VkEncoder* enc = (VkEncoder*)context;
        (void)input_result;

        if (!supportsDeferredCommands()) {
            return enc->vkEndCommandBuffer(commandBuffer);
        }

        enc->vkEndCommandBufferAsyncGOOGLE(commandBuffer);

        return VK_SUCCESS;
    }

    VkResult on_vkResetCommandBuffer(
        void* context, VkResult input_result,
        VkCommandBuffer commandBuffer,
        VkCommandBufferResetFlags flags) {

        VkEncoder* enc = (VkEncoder*)context;
        (void)input_result;

        if (!supportsDeferredCommands()) {
            return enc->vkResetCommandBuffer(commandBuffer, flags);
        }

        enc->vkResetCommandBufferAsyncGOOGLE(commandBuffer, flags);
        return VK_SUCCESS;
    }
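
    // Design note on the Async GOOGLE variants above: when deferred commands
    // are supported, begin/end/reset are fire-and-forget. The guest returns
    // VK_SUCCESS immediately without waiting for the host, trading away
    // host-side error reporting at these calls for one fewer round trip each.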

    VkResult on_vkCreateImageView(
        void* context, VkResult input_result,
        VkDevice device,
        const VkImageViewCreateInfo* pCreateInfo,
        const VkAllocationCallbacks* pAllocator,
        VkImageView* pView) {

        VkEncoder* enc = (VkEncoder*)context;
        (void)input_result;

        VkImageViewCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);

#ifdef VK_USE_PLATFORM_ANDROID_KHR
        const VkExternalFormatANDROID* extFormatAndroidPtr =
            vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
        if (extFormatAndroidPtr && extFormatAndroidPtr->externalFormat) {
            localCreateInfo.format =
                vk_format_from_android(extFormatAndroidPtr->externalFormat);
        }
#endif

        return enc->vkCreateImageView(device, &localCreateInfo, pAllocator, pView);
    }

    uint32_t getApiVersionFromInstance(VkInstance instance) const {
        AutoLock lock(mLock);
        uint32_t api = kDefaultApiVersion;

        auto it = info_VkInstance.find(instance);
        if (it == info_VkInstance.end()) return api;

        api = it->second.highestApiVersion;

        return api;
    }

    uint32_t getApiVersionFromDevice(VkDevice device) const {
        AutoLock lock(mLock);

        uint32_t api = kDefaultApiVersion;

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return api;

        api = it->second.apiVersion;

        return api;
    }

    bool hasInstanceExtension(VkInstance instance, const std::string& name) const {
        AutoLock lock(mLock);

        auto it = info_VkInstance.find(instance);
        if (it == info_VkInstance.end()) return false;

        return it->second.enabledExtensions.find(name) !=
               it->second.enabledExtensions.end();
    }

    bool hasDeviceExtension(VkDevice device, const std::string& name) const {
        AutoLock lock(mLock);

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return false;

        return it->second.enabledExtensions.find(name) !=
               it->second.enabledExtensions.end();
    }

private:
    mutable Lock mLock;
    HostVisibleMemoryVirtualizationInfo mHostVisibleMemoryVirtInfo;
    std::unique_ptr<EmulatorFeatureInfo> mFeatureInfo;
    ResourceTracker::ThreadingCallbacks mThreadingCallbacks;
    uint32_t mStreamFeatureBits = 0;
    std::unique_ptr<GoldfishAddressSpaceBlockProvider> mGoldfishAddressSpaceBlockProvider;

    std::vector<VkExtensionProperties> mHostInstanceExtensions;
    std::vector<VkExtensionProperties> mHostDeviceExtensions;

    int mSyncDeviceFd = -1;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    int mRendernodeFd = -1;
#endif

#ifdef VK_USE_PLATFORM_FUCHSIA
    fuchsia::hardware::goldfish::ControlDeviceSyncPtr mControlDevice;
    fuchsia::sysmem::AllocatorSyncPtr mSysmemAllocator;
#endif

    WorkPool mWorkPool { 4 };
    std::unordered_map<VkQueue, std::vector<WorkPool::WaitGroupHandle>>
        mQueueSensitiveWorkPoolItems;
};

ResourceTracker::ResourceTracker() : mImpl(new ResourceTracker::Impl()) { }
ResourceTracker::~ResourceTracker() { }

VulkanHandleMapping* ResourceTracker::createMapping() {
    return &mImpl->createMapping;
}

VulkanHandleMapping* ResourceTracker::unwrapMapping() {
    return &mImpl->unwrapMapping;
}

VulkanHandleMapping* ResourceTracker::destroyMapping() {
    return &mImpl->destroyMapping;
}

VulkanHandleMapping* ResourceTracker::defaultMapping() {
    return &mImpl->defaultMapping;
}

static ResourceTracker* sTracker = nullptr;

// static
ResourceTracker* ResourceTracker::get() {
    if (!sTracker) {
        // To be initialized once on vulkan device open.
        sTracker = new ResourceTracker;
    }
    return sTracker;
}
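
// Note: the lazy initialization above is unsynchronized; it assumes the
// first ResourceTracker::get() happens-before any concurrent use (e.g. at
// ICD load). A hypothetical thread-safe alternative would be a function-local
// static:
//
//     ResourceTracker* ResourceTracker::get() {
//         static ResourceTracker* const sTracker = new ResourceTracker;
//         return sTracker;  // C++11 guarantees thread-safe initialization
//     }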

#define HANDLE_REGISTER_IMPL(type) \
    void ResourceTracker::register_##type(type obj) { \
        mImpl->register_##type(obj); \
    } \
    void ResourceTracker::unregister_##type(type obj) { \
        mImpl->unregister_##type(obj); \
    }

GOLDFISH_VK_LIST_HANDLE_TYPES(HANDLE_REGISTER_IMPL)

bool ResourceTracker::isMemoryTypeHostVisible(
    VkDevice device, uint32_t typeIndex) const {
    return mImpl->isMemoryTypeHostVisible(device, typeIndex);
}

uint8_t* ResourceTracker::getMappedPointer(VkDeviceMemory memory) {
    return mImpl->getMappedPointer(memory);
}

VkDeviceSize ResourceTracker::getMappedSize(VkDeviceMemory memory) {
    return mImpl->getMappedSize(memory);
}

VkDeviceSize ResourceTracker::getNonCoherentExtendedSize(VkDevice device, VkDeviceSize basicSize) const {
    return mImpl->getNonCoherentExtendedSize(device, basicSize);
}

bool ResourceTracker::isValidMemoryRange(const VkMappedMemoryRange& range) const {
    return mImpl->isValidMemoryRange(range);
}

void ResourceTracker::setupFeatures(const EmulatorFeatureInfo* features) {
    mImpl->setupFeatures(features);
}

void ResourceTracker::setThreadingCallbacks(const ResourceTracker::ThreadingCallbacks& callbacks) {
    mImpl->setThreadingCallbacks(callbacks);
}

bool ResourceTracker::hostSupportsVulkan() const {
    return mImpl->hostSupportsVulkan();
}

bool ResourceTracker::usingDirectMapping() const {
    return mImpl->usingDirectMapping();
}

uint32_t ResourceTracker::getStreamFeatures() const {
    return mImpl->getStreamFeatures();
}

uint32_t ResourceTracker::getApiVersionFromInstance(VkInstance instance) const {
    return mImpl->getApiVersionFromInstance(instance);
}

uint32_t ResourceTracker::getApiVersionFromDevice(VkDevice device) const {
    return mImpl->getApiVersionFromDevice(device);
}

bool ResourceTracker::hasInstanceExtension(VkInstance instance, const std::string& name) const {
    return mImpl->hasInstanceExtension(instance, name);
}

bool ResourceTracker::hasDeviceExtension(VkDevice device, const std::string& name) const {
    return mImpl->hasDeviceExtension(device, name);
}

VkResult ResourceTracker::on_vkEnumerateInstanceExtensionProperties(
    void* context,
    VkResult input_result,
    const char* pLayerName,
    uint32_t* pPropertyCount,
    VkExtensionProperties* pProperties) {
    return mImpl->on_vkEnumerateInstanceExtensionProperties(
        context, input_result, pLayerName, pPropertyCount, pProperties);
}

VkResult ResourceTracker::on_vkEnumerateDeviceExtensionProperties(
    void* context,
    VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const char* pLayerName,
    uint32_t* pPropertyCount,
    VkExtensionProperties* pProperties) {
    return mImpl->on_vkEnumerateDeviceExtensionProperties(
        context, input_result, physicalDevice, pLayerName, pPropertyCount, pProperties);
}

VkResult ResourceTracker::on_vkEnumeratePhysicalDevices(
    void* context, VkResult input_result,
    VkInstance instance, uint32_t* pPhysicalDeviceCount,
    VkPhysicalDevice* pPhysicalDevices) {
    return mImpl->on_vkEnumeratePhysicalDevices(
        context, input_result, instance, pPhysicalDeviceCount,
        pPhysicalDevices);
}

void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties(
        context, physicalDevice, pMemoryProperties);
}

void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties2(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
        context, physicalDevice, pMemoryProperties);
}

void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties2KHR(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    // The KHR variant shares the core Vulkan 1.1 implementation.
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
        context, physicalDevice, pMemoryProperties);
}

VkResult ResourceTracker::on_vkCreateInstance(
    void* context,
    VkResult input_result,
    const VkInstanceCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkInstance* pInstance) {
    return mImpl->on_vkCreateInstance(
        context, input_result, pCreateInfo, pAllocator, pInstance);
}

VkResult ResourceTracker::on_vkCreateDevice(
    void* context,
    VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const VkDeviceCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDevice* pDevice) {
    return mImpl->on_vkCreateDevice(
        context, input_result, physicalDevice, pCreateInfo, pAllocator, pDevice);
}

void ResourceTracker::on_vkDestroyDevice_pre(
    void* context,
    VkDevice device,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDevice_pre(context, device, pAllocator);
}

VkResult ResourceTracker::on_vkAllocateMemory(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkMemoryAllocateInfo* pAllocateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDeviceMemory* pMemory) {
    return mImpl->on_vkAllocateMemory(
        context, input_result, device, pAllocateInfo, pAllocator, pMemory);
}

void ResourceTracker::on_vkFreeMemory(
    void* context,
    VkDevice device,
    VkDeviceMemory memory,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkFreeMemory(
        context, device, memory, pAllocator);
}

VkResult ResourceTracker::on_vkMapMemory(
    void* context,
    VkResult input_result,
    VkDevice device,
    VkDeviceMemory memory,
    VkDeviceSize offset,
    VkDeviceSize size,
    VkMemoryMapFlags flags,
    void** ppData) {
    return mImpl->on_vkMapMemory(
        context, input_result, device, memory, offset, size, flags, ppData);
}

void ResourceTracker::on_vkUnmapMemory(
    void* context,
    VkDevice device,
    VkDeviceMemory memory) {
    mImpl->on_vkUnmapMemory(context, device, memory);
}

VkResult ResourceTracker::on_vkCreateImage(
    void* context, VkResult input_result,
    VkDevice device, const VkImageCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkImage *pImage) {
    return mImpl->on_vkCreateImage(
        context, input_result,
        device, pCreateInfo, pAllocator, pImage);
}

void ResourceTracker::on_vkDestroyImage(
    void* context,
    VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    mImpl->on_vkDestroyImage(context, device, image, pAllocator);
}

void ResourceTracker::on_vkGetImageMemoryRequirements(
    void *context, VkDevice device, VkImage image,
    VkMemoryRequirements *pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements(
        context, device, image, pMemoryRequirements);
}

void ResourceTracker::on_vkGetImageMemoryRequirements2(
    void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
    VkMemoryRequirements2 *pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements2(
        context, device, pInfo, pMemoryRequirements);
}

void ResourceTracker::on_vkGetImageMemoryRequirements2KHR(
    void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
    VkMemoryRequirements2 *pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements2KHR(
        context, device, pInfo, pMemoryRequirements);
}

VkResult ResourceTracker::on_vkBindImageMemory(
    void* context, VkResult input_result,
    VkDevice device, VkImage image, VkDeviceMemory memory,
    VkDeviceSize memoryOffset) {
    return mImpl->on_vkBindImageMemory(
        context, input_result, device, image, memory, memoryOffset);
}

VkResult ResourceTracker::on_vkBindImageMemory2(
    void* context, VkResult input_result,
    VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindImageMemory2(
        context, input_result, device, bindingCount, pBindInfos);
}

VkResult ResourceTracker::on_vkBindImageMemory2KHR(
    void* context, VkResult input_result,
    VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindImageMemory2KHR(
        context, input_result, device, bindingCount, pBindInfos);
}
4722
on_vkCreateBuffer(void * context,VkResult input_result,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)4723 VkResult ResourceTracker::on_vkCreateBuffer(
4724 void* context, VkResult input_result,
4725 VkDevice device, const VkBufferCreateInfo *pCreateInfo,
4726 const VkAllocationCallbacks *pAllocator,
4727 VkBuffer *pBuffer) {
4728 return mImpl->on_vkCreateBuffer(
4729 context, input_result,
4730 device, pCreateInfo, pAllocator, pBuffer);
4731 }
4732
on_vkDestroyBuffer(void * context,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)4733 void ResourceTracker::on_vkDestroyBuffer(
4734 void* context,
4735 VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
4736 mImpl->on_vkDestroyBuffer(context, device, buffer, pAllocator);
4737 }
4738
on_vkGetBufferMemoryRequirements(void * context,VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)4739 void ResourceTracker::on_vkGetBufferMemoryRequirements(
4740 void* context, VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
4741 mImpl->on_vkGetBufferMemoryRequirements(context, device, buffer, pMemoryRequirements);
4742 }
4743
on_vkGetBufferMemoryRequirements2(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)4744 void ResourceTracker::on_vkGetBufferMemoryRequirements2(
4745 void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
4746 VkMemoryRequirements2* pMemoryRequirements) {
4747 mImpl->on_vkGetBufferMemoryRequirements2(
4748 context, device, pInfo, pMemoryRequirements);
4749 }
4750
on_vkGetBufferMemoryRequirements2KHR(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)4751 void ResourceTracker::on_vkGetBufferMemoryRequirements2KHR(
4752 void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
4753 VkMemoryRequirements2* pMemoryRequirements) {
4754 mImpl->on_vkGetBufferMemoryRequirements2KHR(
4755 context, device, pInfo, pMemoryRequirements);
4756 }
4757
on_vkBindBufferMemory(void * context,VkResult input_result,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)4758 VkResult ResourceTracker::on_vkBindBufferMemory(
4759 void* context, VkResult input_result,
4760 VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
4761 return mImpl->on_vkBindBufferMemory(
4762 context, input_result,
4763 device, buffer, memory, memoryOffset);
4764 }
4765
on_vkBindBufferMemory2(void * context,VkResult input_result,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)4766 VkResult ResourceTracker::on_vkBindBufferMemory2(
4767 void* context, VkResult input_result,
4768 VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
4769 return mImpl->on_vkBindBufferMemory2(
4770 context, input_result,
4771 device, bindInfoCount, pBindInfos);
4772 }
4773
on_vkBindBufferMemory2KHR(void * context,VkResult input_result,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)4774 VkResult ResourceTracker::on_vkBindBufferMemory2KHR(
4775 void* context, VkResult input_result,
4776 VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
4777 return mImpl->on_vkBindBufferMemory2KHR(
4778 context, input_result,
4779 device, bindInfoCount, pBindInfos);
4780 }
4781
on_vkCreateSemaphore(void * context,VkResult input_result,VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)4782 VkResult ResourceTracker::on_vkCreateSemaphore(
4783 void* context, VkResult input_result,
4784 VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
4785 const VkAllocationCallbacks *pAllocator,
4786 VkSemaphore *pSemaphore) {
4787 return mImpl->on_vkCreateSemaphore(
4788 context, input_result,
4789 device, pCreateInfo, pAllocator, pSemaphore);
4790 }
4791
on_vkDestroySemaphore(void * context,VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)4792 void ResourceTracker::on_vkDestroySemaphore(
4793 void* context,
4794 VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
4795 mImpl->on_vkDestroySemaphore(context, device, semaphore, pAllocator);
4796 }
4797
on_vkQueueSubmit(void * context,VkResult input_result,VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)4798 VkResult ResourceTracker::on_vkQueueSubmit(
4799 void* context, VkResult input_result,
4800 VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
4801 return mImpl->on_vkQueueSubmit(
4802 context, input_result, queue, submitCount, pSubmits, fence);
4803 }
4804
on_vkQueueWaitIdle(void * context,VkResult input_result,VkQueue queue)4805 VkResult ResourceTracker::on_vkQueueWaitIdle(
4806 void* context, VkResult input_result,
4807 VkQueue queue) {
4808 return mImpl->on_vkQueueWaitIdle(context, input_result, queue);
4809 }
4810
on_vkGetSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)4811 VkResult ResourceTracker::on_vkGetSemaphoreFdKHR(
4812 void* context, VkResult input_result,
4813 VkDevice device,
4814 const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
4815 int* pFd) {
4816 return mImpl->on_vkGetSemaphoreFdKHR(context, input_result, device, pGetFdInfo, pFd);
4817 }
4818
on_vkImportSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)4819 VkResult ResourceTracker::on_vkImportSemaphoreFdKHR(
4820 void* context, VkResult input_result,
4821 VkDevice device,
4822 const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
4823 return mImpl->on_vkImportSemaphoreFdKHR(context, input_result, device, pImportSemaphoreFdInfo);
4824 }
4825
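// The unwrap_* helpers strip Android swapchain plumbing before a call goes to
// the host: unwrap_VkNativeBufferANDROID rewrites a VkImageCreateInfo whose
// pNext chain carries a VkNativeBufferANDROID, and the nativeFenceFd variant
// translates the guest-side sync fd used by vkAcquireImageANDROID.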
void ResourceTracker::unwrap_VkNativeBufferANDROID(
    const VkImageCreateInfo* pCreateInfo,
    VkImageCreateInfo* local_pCreateInfo) {
    mImpl->unwrap_VkNativeBufferANDROID(pCreateInfo, local_pCreateInfo);
}

void ResourceTracker::unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd, int* fd_out) {
    mImpl->unwrap_vkAcquireImageANDROID_nativeFenceFd(fd, fd_out);
}

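// Fuchsia-only entry points: external memory and semaphore export/import via
// zircon handles, plus sysmem buffer collection creation, constraint setting,
// and property queries. Compiled only when VK_USE_PLATFORM_FUCHSIA is defined.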
#ifdef VK_USE_PLATFORM_FUCHSIA
VkResult ResourceTracker::on_vkGetMemoryZirconHandleFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    const VkMemoryGetZirconHandleInfoFUCHSIA* pInfo,
    uint32_t* pHandle) {
    return mImpl->on_vkGetMemoryZirconHandleFUCHSIA(
        context, input_result, device, pInfo, pHandle);
}

VkResult ResourceTracker::on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    VkExternalMemoryHandleTypeFlagBits handleType,
    uint32_t handle,
    VkMemoryZirconHandlePropertiesFUCHSIA* pProperties) {
    return mImpl->on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
        context, input_result, device, handleType, handle, pProperties);
}

VkResult ResourceTracker::on_vkGetSemaphoreZirconHandleFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    const VkSemaphoreGetZirconHandleInfoFUCHSIA* pInfo,
    uint32_t* pHandle) {
    return mImpl->on_vkGetSemaphoreZirconHandleFUCHSIA(
        context, input_result, device, pInfo, pHandle);
}

VkResult ResourceTracker::on_vkImportSemaphoreZirconHandleFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    const VkImportSemaphoreZirconHandleInfoFUCHSIA* pInfo) {
    return mImpl->on_vkImportSemaphoreZirconHandleFUCHSIA(
        context, input_result, device, pInfo);
}

VkResult ResourceTracker::on_vkCreateBufferCollectionFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    const VkBufferCollectionCreateInfoFUCHSIA* pInfo,
    const VkAllocationCallbacks* pAllocator,
    VkBufferCollectionFUCHSIA* pCollection) {
    return mImpl->on_vkCreateBufferCollectionFUCHSIA(
        context, input_result, device, pInfo, pAllocator, pCollection);
}

void ResourceTracker::on_vkDestroyBufferCollectionFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    VkBufferCollectionFUCHSIA collection,
    const VkAllocationCallbacks* pAllocator) {
    return mImpl->on_vkDestroyBufferCollectionFUCHSIA(
        context, input_result, device, collection, pAllocator);
}

VkResult ResourceTracker::on_vkSetBufferCollectionConstraintsFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    VkBufferCollectionFUCHSIA collection,
    const VkImageCreateInfo* pImageInfo) {
    return mImpl->on_vkSetBufferCollectionConstraintsFUCHSIA(
        context, input_result, device, collection, pImageInfo);
}

VkResult ResourceTracker::on_vkGetBufferCollectionPropertiesFUCHSIA(
    void* context, VkResult input_result,
    VkDevice device,
    VkBufferCollectionFUCHSIA collection,
    VkBufferCollectionPropertiesFUCHSIA* pProperties) {
    return mImpl->on_vkGetBufferCollectionPropertiesFUCHSIA(
        context, input_result, device, collection, pProperties);
}
#endif

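// VK_ANDROID_external_memory_android_hardware_buffer support: property
// queries on an AHardwareBuffer, and export of a VkDeviceMemory back to an
// AHardwareBuffer.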
VkResult ResourceTracker::on_vkGetAndroidHardwareBufferPropertiesANDROID(
    void* context, VkResult input_result,
    VkDevice device,
    const AHardwareBuffer* buffer,
    VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
    return mImpl->on_vkGetAndroidHardwareBufferPropertiesANDROID(
        context, input_result, device, buffer, pProperties);
}

VkResult ResourceTracker::on_vkGetMemoryAndroidHardwareBufferANDROID(
    void* context, VkResult input_result,
    VkDevice device,
    const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
    struct AHardwareBuffer** pBuffer) {
    return mImpl->on_vkGetMemoryAndroidHardwareBufferANDROID(
        context, input_result,
        device, pInfo, pBuffer);
}

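// Sampler Y'CbCr conversion (VK_KHR_sampler_ycbcr_conversion / Vulkan 1.1).
// The core and KHR variants are forwarded to separate impl methods.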
VkResult ResourceTracker::on_vkCreateSamplerYcbcrConversion(
    void* context, VkResult input_result,
    VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(
        context, input_result, device, pCreateInfo, pAllocator, pYcbcrConversion);
}

void ResourceTracker::on_vkDestroySamplerYcbcrConversion(
    void* context,
    VkDevice device,
    VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(
        context, device, ycbcrConversion, pAllocator);
}

VkResult ResourceTracker::on_vkCreateSamplerYcbcrConversionKHR(
    void* context, VkResult input_result,
    VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversionKHR(
        context, input_result, device, pCreateInfo, pAllocator, pYcbcrConversion);
}

void ResourceTracker::on_vkDestroySamplerYcbcrConversionKHR(
    void* context,
    VkDevice device,
    VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversionKHR(
        context, device, ycbcrConversion, pAllocator);
}

VkResult ResourceTracker::on_vkCreateSampler(
    void* context, VkResult input_result,
    VkDevice device,
    const VkSamplerCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSampler* pSampler) {
    return mImpl->on_vkCreateSampler(
        context, input_result, device, pCreateInfo, pAllocator, pSampler);
}

void ResourceTracker::on_vkGetPhysicalDeviceExternalFenceProperties(
    void* context,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
    VkExternalFenceProperties* pExternalFenceProperties) {
    mImpl->on_vkGetPhysicalDeviceExternalFenceProperties(
        context, physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
}

void ResourceTracker::on_vkGetPhysicalDeviceExternalFencePropertiesKHR(
    void* context,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
    VkExternalFenceProperties* pExternalFenceProperties) {
    // The KHR entry point is an alias of the core one, so it is deliberately
    // routed to the same implementation.
    mImpl->on_vkGetPhysicalDeviceExternalFenceProperties(
        context, physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
}

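// Fence lifecycle and VK_KHR_external_fence_fd import/export. As elsewhere,
// input_result carries the result produced so far and is passed through to
// the impl rather than being interpreted by these wrappers.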
VkResult ResourceTracker::on_vkCreateFence(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkFenceCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
    return mImpl->on_vkCreateFence(
        context, input_result, device, pCreateInfo, pAllocator, pFence);
}

void ResourceTracker::on_vkDestroyFence(
    void* context,
    VkDevice device,
    VkFence fence,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyFence(
        context, device, fence, pAllocator);
}

VkResult ResourceTracker::on_vkResetFences(
    void* context,
    VkResult input_result,
    VkDevice device,
    uint32_t fenceCount,
    const VkFence* pFences) {
    return mImpl->on_vkResetFences(
        context, input_result, device, fenceCount, pFences);
}

VkResult ResourceTracker::on_vkImportFenceFdKHR(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
    return mImpl->on_vkImportFenceFdKHR(
        context, input_result, device, pImportFenceFdInfo);
}

VkResult ResourceTracker::on_vkGetFenceFdKHR(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkFenceGetFdInfoKHR* pGetFdInfo,
    int* pFd) {
    return mImpl->on_vkGetFenceFdKHR(
        context, input_result, device, pGetFdInfo, pFd);
}

VkResult ResourceTracker::on_vkWaitForFences(
    void* context,
    VkResult input_result,
    VkDevice device,
    uint32_t fenceCount,
    const VkFence* pFences,
    VkBool32 waitAll,
    uint64_t timeout) {
    return mImpl->on_vkWaitForFences(
        context, input_result, device, fenceCount, pFences, waitAll, timeout);
}

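// Descriptor pool, set, and layout entry points. Pool-level operations
// (reset/destroy) and set-level operations are tracked together, since
// resetting or destroying a pool implicitly frees the sets allocated from it.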
VkResult ResourceTracker::on_vkCreateDescriptorPool(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkDescriptorPoolCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorPool* pDescriptorPool) {
    return mImpl->on_vkCreateDescriptorPool(
        context, input_result, device, pCreateInfo, pAllocator, pDescriptorPool);
}

void ResourceTracker::on_vkDestroyDescriptorPool(
    void* context,
    VkDevice device,
    VkDescriptorPool descriptorPool,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorPool(context, device, descriptorPool, pAllocator);
}

VkResult ResourceTracker::on_vkResetDescriptorPool(
    void* context,
    VkResult input_result,
    VkDevice device,
    VkDescriptorPool descriptorPool,
    VkDescriptorPoolResetFlags flags) {
    return mImpl->on_vkResetDescriptorPool(
        context, input_result, device, descriptorPool, flags);
}

VkResult ResourceTracker::on_vkAllocateDescriptorSets(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkDescriptorSetAllocateInfo* pAllocateInfo,
    VkDescriptorSet* pDescriptorSets) {
    return mImpl->on_vkAllocateDescriptorSets(
        context, input_result, device, pAllocateInfo, pDescriptorSets);
}

VkResult ResourceTracker::on_vkFreeDescriptorSets(
    void* context,
    VkResult input_result,
    VkDevice device,
    VkDescriptorPool descriptorPool,
    uint32_t descriptorSetCount,
    const VkDescriptorSet* pDescriptorSets) {
    return mImpl->on_vkFreeDescriptorSets(
        context, input_result, device, descriptorPool, descriptorSetCount, pDescriptorSets);
}

VkResult ResourceTracker::on_vkCreateDescriptorSetLayout(
    void* context,
    VkResult input_result,
    VkDevice device,
    const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorSetLayout* pSetLayout) {
    return mImpl->on_vkCreateDescriptorSetLayout(
        context, input_result, device, pCreateInfo, pAllocator, pSetLayout);
}

void ResourceTracker::on_vkUpdateDescriptorSets(
    void* context,
    VkDevice device,
    uint32_t descriptorWriteCount,
    const VkWriteDescriptorSet* pDescriptorWrites,
    uint32_t descriptorCopyCount,
    const VkCopyDescriptorSet* pDescriptorCopies) {
    return mImpl->on_vkUpdateDescriptorSets(
        context, device, descriptorWriteCount, pDescriptorWrites,
        descriptorCopyCount, pDescriptorCopies);
}

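// vkMapMemoryIntoAddressSpaceGOOGLE support. Judging by the naming, the _pre
// hook is meant to run before the call is sent to the host, and the plain
// variant after the host result is available; the exact contract lives in
// the impl.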
VkResult ResourceTracker::on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
    void* context,
    VkResult input_result,
    VkDevice device,
    VkDeviceMemory memory,
    uint64_t* pAddress) {
    return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
        context, input_result, device, memory, pAddress);
}

VkResult ResourceTracker::on_vkMapMemoryIntoAddressSpaceGOOGLE(
    void* context,
    VkResult input_result,
    VkDevice device,
    VkDeviceMemory memory,
    uint64_t* pAddress) {
    return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(
        context, input_result, device, memory, pAddress);
}

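// Descriptor update templates (core and the KHR alias). A template describes
// the layout of the raw pData blob consumed by
// vkUpdateDescriptorSetWithTemplate, replacing per-write
// VkWriteDescriptorSet structures.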
VkResult ResourceTracker::on_vkCreateDescriptorUpdateTemplate(
    void* context, VkResult input_result,
    VkDevice device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplate(
        context, input_result,
        device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
}

VkResult ResourceTracker::on_vkCreateDescriptorUpdateTemplateKHR(
    void* context, VkResult input_result,
    VkDevice device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(
        context, input_result,
        device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
}

void ResourceTracker::on_vkUpdateDescriptorSetWithTemplate(
    void* context,
    VkDevice device,
    VkDescriptorSet descriptorSet,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
    const void* pData) {
    mImpl->on_vkUpdateDescriptorSetWithTemplate(
        context, device, descriptorSet,
        descriptorUpdateTemplate, pData);
}

VkResult ResourceTracker::on_vkGetPhysicalDeviceImageFormatProperties2(
    void* context, VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
        context, input_result, physicalDevice, pImageFormatInfo,
        pImageFormatProperties);
}

VkResult ResourceTracker::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
    void* context, VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2KHR(
        context, input_result, physicalDevice, pImageFormatInfo,
        pImageFormatProperties);
}

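// Command buffer recording. syncEncodersForCommandBuffer keeps the command
// buffer's recorded stream consistent when it is touched from more than one
// VkEncoder; the meaning of the returned value is defined by the impl.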
uint32_t ResourceTracker::syncEncodersForCommandBuffer(
    VkCommandBuffer commandBuffer, VkEncoder* current) {
    return mImpl->syncEncodersForCommandBuffer(commandBuffer, current);
}

VkResult ResourceTracker::on_vkBeginCommandBuffer(
    void* context, VkResult input_result,
    VkCommandBuffer commandBuffer,
    const VkCommandBufferBeginInfo* pBeginInfo) {
    return mImpl->on_vkBeginCommandBuffer(
        context, input_result, commandBuffer, pBeginInfo);
}

VkResult ResourceTracker::on_vkEndCommandBuffer(
    void* context, VkResult input_result,
    VkCommandBuffer commandBuffer) {
    return mImpl->on_vkEndCommandBuffer(
        context, input_result, commandBuffer);
}

VkResult ResourceTracker::on_vkResetCommandBuffer(
    void* context, VkResult input_result,
    VkCommandBuffer commandBuffer,
    VkCommandBufferResetFlags flags) {
    return mImpl->on_vkResetCommandBuffer(
        context, input_result, commandBuffer, flags);
}

VkResult ResourceTracker::on_vkCreateImageView(
    void* context, VkResult input_result,
    VkDevice device,
    const VkImageViewCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkImageView* pView) {
    return mImpl->on_vkCreateImageView(
        context, input_result, device, pCreateInfo, pAllocator, pView);
}

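// Array-based transforms for VkDeviceMemory and its associated scalars. Each
// pointer is paired with an element count so callers (typically generated
// marshaling code) can rewrite handles, offsets, sizes, and memory type
// indices/bits in place as they cross the guest/host boundary.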
void ResourceTracker::deviceMemoryTransform_tohost(
    VkDeviceMemory* memory, uint32_t memoryCount,
    VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount,
    uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    mImpl->deviceMemoryTransform_tohost(
        memory, memoryCount,
        offset, offsetCount,
        size, sizeCount,
        typeIndex, typeIndexCount,
        typeBits, typeBitsCount);
}

void ResourceTracker::deviceMemoryTransform_fromhost(
    VkDeviceMemory* memory, uint32_t memoryCount,
    VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount,
    uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    mImpl->deviceMemoryTransform_fromhost(
        memory, memoryCount,
        offset, offsetCount,
        size, sizeCount,
        typeIndex, typeIndexCount,
        typeBits, typeBitsCount);
}

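// DEFINE_TRANSFORMED_TYPE_IMPL stamps out a tohost/fromhost transform pair
// for every type in LIST_TRANSFORMED_TYPES. In this file they are all no-ops.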
#define DEFINE_TRANSFORMED_TYPE_IMPL(type) \
    void ResourceTracker::transformImpl_##type##_tohost(const type*, uint32_t) { } \
    void ResourceTracker::transformImpl_##type##_fromhost(const type*, uint32_t) { }

LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)
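
// For a hypothetical entry VkFoo in LIST_TRANSFORMED_TYPES, the invocation
// above expands to:
//
//   void ResourceTracker::transformImpl_VkFoo_tohost(const VkFoo*, uint32_t) { }
//   void ResourceTracker::transformImpl_VkFoo_fromhost(const VkFoo*, uint32_t) { }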

} // namespace goldfish_vk