/*
 * Copyright © 2017 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#ifndef VK_UTIL_H
#define VK_UTIL_H

/* common inlines and macros for vulkan drivers */

#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

#include <chrono>
#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <thread>
#include <tuple>
#include <type_traits>
#include <vector>

#include "VkDecoderContext.h"
#include "VulkanDispatch.h"
#include "aemu/base/synchronization/Lock.h"
#include "host-common/GfxstreamFatalError.h"
#include "host-common/logging.h"
#include "vk_fn_info.h"
#include "vk_struct_id.h"

namespace gfxstream {
namespace vk {

struct vk_struct_common {
    VkStructureType sType;
    struct vk_struct_common* pNext;
};

struct vk_struct_chain_iterator {
    vk_struct_common* value;
};

#define vk_foreach_struct(__iter, __start)                                               \
    for (struct vk_struct_common* __iter = (struct vk_struct_common*)(__start); __iter; \
         __iter = __iter->pNext)

#define vk_foreach_struct_const(__iter, __start)                                            \
    for (const struct vk_struct_common* __iter = (const struct vk_struct_common*)(__start); \
         __iter; __iter = __iter->pNext)
/**
 * A wrapper for a Vulkan output array. A Vulkan output array is one that
 * follows the convention of the parameters to
 * vkGetPhysicalDeviceQueueFamilyProperties().
 *
 * Example Usage:
 *
 *    VkResult
 *    vkGetPhysicalDeviceQueueFamilyProperties(
 *        VkPhysicalDevice physicalDevice,
 *        uint32_t* pQueueFamilyPropertyCount,
 *        VkQueueFamilyProperties* pQueueFamilyProperties)
 *    {
 *        VK_OUTARRAY_MAKE(props, pQueueFamilyProperties,
 *                         pQueueFamilyPropertyCount);
 *
 *        vk_outarray_append(&props, p) {
 *            p->queueFlags = ...;
 *            p->queueCount = ...;
 *        }
 *
 *        vk_outarray_append(&props, p) {
 *            p->queueFlags = ...;
 *            p->queueCount = ...;
 *        }
 *
 *        return vk_outarray_status(&props);
 *    }
 */
struct __vk_outarray {
    /** May be null. */
    void* data;

    /**
     * Capacity, in number of elements. Capacity is unlimited (UINT32_MAX) if
     * data is null.
     */
    uint32_t cap;

    /**
     * Count of elements successfully written to the array. Every write is
     * considered successful if data is null.
     */
    uint32_t* filled_len;

    /**
     * Count of elements that would have been written to the array if its
     * capacity were sufficient. Vulkan functions often return VK_INCOMPLETE
     * when `*filled_len < wanted_len`.
     */
    uint32_t wanted_len;
};

static inline void __vk_outarray_init(struct __vk_outarray* a, void* data, uint32_t* len) {
    a->data = data;
    a->cap = *len;
    a->filled_len = len;
    *a->filled_len = 0;
    a->wanted_len = 0;

    if (a->data == NULL) a->cap = UINT32_MAX;
}

static inline VkResult __vk_outarray_status(const struct __vk_outarray* a) {
    if (*a->filled_len < a->wanted_len)
        return VK_INCOMPLETE;
    else
        return VK_SUCCESS;
}

static inline void* __vk_outarray_next(struct __vk_outarray* a, size_t elem_size) {
    void* p = NULL;

    a->wanted_len += 1;

    if (*a->filled_len >= a->cap) return NULL;

    if (a->data != NULL) p = ((uint8_t*)a->data) + (*a->filled_len) * elem_size;

    *a->filled_len += 1;

    return p;
}

#define vk_outarray(elem_t)        \
    struct {                       \
        struct __vk_outarray base; \
        elem_t meta[];             \
    }

#define vk_outarray_typeof_elem(a) __typeof__((a)->meta[0])
#define vk_outarray_sizeof_elem(a) sizeof((a)->meta[0])

#define vk_outarray_init(a, data, len) __vk_outarray_init(&(a)->base, (data), (len))

#define VK_OUTARRAY_MAKE(name, data, len)    \
    vk_outarray(__typeof__((data)[0])) name; \
    vk_outarray_init(&name, (data), (len))

#define vk_outarray_status(a) __vk_outarray_status(&(a)->base)

#define vk_outarray_next(a) \
    ((vk_outarray_typeof_elem(a)*)__vk_outarray_next(&(a)->base, vk_outarray_sizeof_elem(a)))

/**
 * Append to a Vulkan output array.
 *
 * This is a block-based macro. For example:
 *
 *    vk_outarray_append(&a, elem) {
 *        elem->foo = ...;
 *        elem->bar = ...;
 *    }
 *
 * The array `a` has type `vk_outarray(elem_t) *`. It is usually declared with
 * VK_OUTARRAY_MAKE(). The variable `elem` is block-scoped and has type
 * `elem_t *`.
 *
 * The macro unconditionally increments the array's `wanted_len`. If the array
 * is not full, then the macro also increments its `filled_len` and then
 * executes the block. When the block is executed, `elem` is non-null and
 * points to the newly appended element.
 */
#define vk_outarray_append(a, elem) \
    for (vk_outarray_typeof_elem(a)* elem = vk_outarray_next(a); elem != NULL; elem = NULL)

static inline void* __vk_find_struct(void* start, VkStructureType sType) {
    vk_foreach_struct(s, start) {
        if (s->sType == sType) return s;
    }

    return NULL;
}

template <class T, class H>
T* vk_find_struct(H* head) {
    (void)vk_get_vk_struct_id<H>::id;
    return static_cast<T*>(__vk_find_struct(static_cast<void*>(head), vk_get_vk_struct_id<T>::id));
}

template <class T, class H>
const T* vk_find_struct(const H* head) {
    (void)vk_get_vk_struct_id<H>::id;
    return static_cast<const T*>(__vk_find_struct(const_cast<void*>(static_cast<const void*>(head)),
                                                  vk_get_vk_struct_id<T>::id));
}

uint32_t vk_get_driver_version(void);

uint32_t vk_get_version_override(void);

#define VK_EXT_OFFSET (1000000000UL)
#define VK_ENUM_EXTENSION(__enum) \
    ((__enum) >= VK_EXT_OFFSET ? ((((__enum)-VK_EXT_OFFSET) / 1000UL) + 1) : 0)
#define VK_ENUM_OFFSET(__enum) ((__enum) >= VK_EXT_OFFSET ? ((__enum) % 1000) : (__enum))
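
// Worked example: VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR is 1000001000,
// so VK_ENUM_EXTENSION yields ((1000001000 - 1000000000) / 1000) + 1 = 2 (the
// registered extension number of VK_KHR_swapchain) and VK_ENUM_OFFSET yields
// 1000001000 % 1000 = 0 (the first value that extension added). Core enum
// values below VK_EXT_OFFSET map to extension 0 and pass through unchanged.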

template <class T>
T vk_make_orphan_copy(const T& vk_struct) {
    T copy = vk_struct;
    copy.pNext = NULL;
    return copy;
}

template <class T>
vk_struct_chain_iterator vk_make_chain_iterator(T* vk_struct) {
    (void)vk_get_vk_struct_id<T>::id;
    vk_struct_chain_iterator result = {reinterpret_cast<vk_struct_common*>(vk_struct)};
    return result;
}

template <class T>
void vk_append_struct(vk_struct_chain_iterator* i, T* vk_struct) {
    (void)vk_get_vk_struct_id<T>::id;

    vk_struct_common* p = i->value;
    if (p->pNext) {
        ::abort();
    }

    p->pNext = reinterpret_cast<vk_struct_common*>(vk_struct);
    vk_struct->pNext = NULL;

    *i = vk_make_chain_iterator(vk_struct);
}
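
// Example (a minimal sketch): building a pNext chain with a chain iterator.
// Every struct appended this way must have an id registered in
// vk_struct_id.h, and vk_append_struct aborts if the tail already has a
// pNext, so structs must be appended strictly in order.
//
//    VkMemoryAllocateInfo allocInfo = {
//        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, .allocationSize = size};
//    VkMemoryDedicatedAllocateInfo dedicatedInfo = {
//        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, .image = image};
//    vk_struct_chain_iterator it = vk_make_chain_iterator(&allocInfo);
//    vk_append_struct(&it, &dedicatedInfo);  // allocInfo.pNext now points at dedicatedInfo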

// The caller should guarantee that none of the pNext structs in the chain starting at nextChain
// are const objects, to avoid unexpected undefined behavior.
template <class T, class U, typename = std::enable_if_t<!std::is_const_v<T> && !std::is_const_v<U>>>
void vk_insert_struct(T& pos, U& nextChain) {
    vk_struct_common* nextChainTail = reinterpret_cast<vk_struct_common*>(&nextChain);
    for (; nextChainTail->pNext; nextChainTail = nextChainTail->pNext) {}

    nextChainTail->pNext = reinterpret_cast<vk_struct_common*>(const_cast<void*>(pos.pNext));
    pos.pNext = &nextChain;
}
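
// Example (sketch): splicing a second chain in right after `pos`. After the
// call, pos.pNext points at nextChain, and the old tail of nextChain points
// at whatever pos.pNext referenced before, so no existing structs are lost.
//
//    VkImageCreateInfo createInfo = {...};           // may already carry a pNext chain
//    VkExternalMemoryImageCreateInfo extMemInfo = {...};
//    vk_insert_struct(createInfo, extMemInfo);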

template <class S, class T>
void vk_struct_chain_remove(S* unwanted, T* vk_struct) {
    if (!unwanted) return;

    vk_foreach_struct(current, vk_struct) {
        if ((void*)unwanted == current->pNext) {
            const vk_struct_common* unwanted_as_common =
                reinterpret_cast<const vk_struct_common*>(unwanted);
            current->pNext = unwanted_as_common->pNext;
        }
    }
}

template <class TypeToFilter, class H>
void vk_struct_chain_filter(H* head) {
    (void)vk_get_vk_struct_id<H>::id;

    auto* curr = reinterpret_cast<vk_struct_common*>(head);
    while (curr != nullptr) {
        if (curr->pNext != nullptr && curr->pNext->sType == vk_get_vk_struct_id<TypeToFilter>::id) {
            curr->pNext = curr->pNext->pNext;
        }
        curr = curr->pNext;
    }
}
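
// Example (sketch): unlinking chained structs of one type before handing a
// chain to a consumer that should not see them. The export struct here is
// only an illustration; any type with an id in vk_struct_id.h works.
//
//    VkMemoryAllocateInfo allocInfo = {...};  // possibly carrying chained structs
//    vk_struct_chain_filter<VkExportMemoryAllocateInfo>(&allocInfo);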

#define VK_CHECK(x)                                                                           \
    do {                                                                                      \
        VkResult err = x;                                                                     \
        if (err != VK_SUCCESS) {                                                              \
            if (err == VK_ERROR_DEVICE_LOST) {                                                \
                vk_util::getVkCheckCallbacks().callIfExists(                                  \
                    &vk_util::VkCheckCallbacks::onVkErrorDeviceLost);                         \
            }                                                                                 \
            if (err == VK_ERROR_OUT_OF_HOST_MEMORY || err == VK_ERROR_OUT_OF_DEVICE_MEMORY || \
                err == VK_ERROR_OUT_OF_POOL_MEMORY) {                                         \
                vk_util::getVkCheckCallbacks().callIfExists(                                  \
                    &vk_util::VkCheckCallbacks::onVkErrorOutOfMemory, err, __func__,          \
                    __LINE__);                                                                \
            }                                                                                 \
            GFXSTREAM_ABORT(::emugl::FatalError(err));                                        \
        }                                                                                     \
    } while (0)

#define VK_CHECK_MEMALLOC(x, allocateInfo)                                                   \
    do {                                                                                     \
        VkResult err = x;                                                                    \
        if (err != VK_SUCCESS) {                                                             \
            if (err == VK_ERROR_OUT_OF_HOST_MEMORY || err == VK_ERROR_OUT_OF_DEVICE_MEMORY) { \
                vk_util::getVkCheckCallbacks().callIfExists(                                 \
                    &vk_util::VkCheckCallbacks::onVkErrorOutOfMemoryOnAllocation, err,       \
                    __func__, __LINE__, allocateInfo.allocationSize);                        \
            }                                                                                \
            GFXSTREAM_ABORT(::emugl::FatalError(err));                                       \
        }                                                                                    \
    } while (0)
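
// Typical usage (sketch): wrap calls whose failure is unrecoverable. Anything
// other than VK_SUCCESS aborts the process after invoking the matching
// VkCheckCallbacks, so these macros are not suitable where an error result is
// expected and handled.
//
//    VK_CHECK(vk.vkCreateFence(device, &fenceCreateInfo, nullptr, &fence));
//
//    VkMemoryAllocateInfo allocInfo = {...};
//    VK_CHECK_MEMALLOC(vk.vkAllocateMemory(device, &allocInfo, nullptr, &memory),
//                      allocInfo);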

typedef void* MTLTextureRef;
typedef void* MTLBufferRef;

namespace vk_util {

inline VkResult waitForVkQueueIdleWithRetry(const VulkanDispatch& vk, VkQueue queue) {
    using namespace std::chrono_literals;
    constexpr uint32_t retryLimit = 5;
    constexpr std::chrono::duration waitInterval = 4ms;
    VkResult res = vk.vkQueueWaitIdle(queue);
    for (uint32_t retryTimes = 1; retryTimes < retryLimit && res == VK_TIMEOUT; retryTimes++) {
        INFO("VK_TIMEOUT returned from vkQueueWaitIdle on attempt %" PRIu32
             ". Waiting %" PRIu32 " ms before the next attempt.",
             retryTimes,
             static_cast<uint32_t>(
                 std::chrono::duration_cast<std::chrono::milliseconds>(waitInterval).count()));
        std::this_thread::sleep_for(waitInterval);
        res = vk.vkQueueWaitIdle(queue);
    }
    return res;
}

typedef struct {
    std::function<void()> onVkErrorDeviceLost;
    std::function<void(VkResult, const char*, int)> onVkErrorOutOfMemory;
    std::function<void(VkResult, const char*, int, uint64_t)> onVkErrorOutOfMemoryOnAllocation;
} VkCheckCallbacks;

template <class T>
class CallbacksWrapper {
   public:
    CallbacksWrapper(std::unique_ptr<T> callbacks) : mCallbacks(std::move(callbacks)) {}

    // function should be a member function pointer to T.
    template <class U, class... Args>
    void callIfExists(U function, Args&&... args) const {
        if (mCallbacks && (*mCallbacks.*function)) {
            (*mCallbacks.*function)(std::forward<Args>(args)...);
        }
    }

    T* get() const { return mCallbacks.get(); }

   private:
    std::unique_ptr<T> mCallbacks;
};

std::optional<uint32_t> findMemoryType(const VulkanDispatch* ivk, VkPhysicalDevice physicalDevice,
                                       uint32_t typeFilter, VkMemoryPropertyFlags properties);

void setVkCheckCallbacks(std::unique_ptr<VkCheckCallbacks>);
const CallbacksWrapper<VkCheckCallbacks>& getVkCheckCallbacks();
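
// Example (sketch): installing a device-lost handler once at startup. Fields
// left unset are simply skipped by callIfExists.
//
//    auto callbacks = std::make_unique<vk_util::VkCheckCallbacks>();
//    callbacks->onVkErrorDeviceLost = [] { /* dump diagnostics */ };
//    vk_util::setVkCheckCallbacks(std::move(callbacks));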

class CrtpBase {};

// Utility class to make chaining inheritance of multiple CRTP classes more
// readable by allowing one to replace
//
// class MyClass
//     : public vk_util::Crtp1<MyClass,
//              vk_util::Crtp2<MyClass,
//                             vk_util::Crtp3<MyClass>>> {};
//
// with
//
// class MyClass
//     : public vk_util::MultiCrtp<MyClass,
//                                 vk_util::Crtp1,
//                                 vk_util::Crtp2,
//                                 vk_util::Crtp3> {};
namespace vk_util_internal {

// For the template "recursion", this is the base case where the list is empty
// and which just inherits from the last type.
template <typename T,  //
          typename U,  //
          template <typename, typename> class... CrtpClasses>
class MultiCrtpChainHelper : public U {};

// For the template "recursion", this is the case where the list is not empty
// and which uses the "current" CRTP class as the "U" type and passes the
// resulting type to the next step in the template "recursion".
template <typename T,                                     //
          typename U,                                     //
          template <typename, typename> class Crtp,      //
          template <typename, typename> class... Crtps>
class MultiCrtpChainHelper<T, U, Crtp, Crtps...>
    : public MultiCrtpChainHelper<T, Crtp<T, U>, Crtps...> {};

}  // namespace vk_util_internal

template <typename T,  //
          template <typename, typename> class... CrtpClasses>
class MultiCrtp : public vk_util_internal::MultiCrtpChainHelper<T, CrtpBase, CrtpClasses...> {};

template <class T, class U = CrtpBase>
class FindMemoryType : public U {
   protected:
    std::optional<uint32_t> findMemoryType(uint32_t typeFilter,
                                           VkMemoryPropertyFlags properties) const {
        const T& self = static_cast<const T&>(*this);
        return vk_util::findMemoryType(&self.m_vk, self.m_vkPhysicalDevice, typeFilter, properties);
    }
};

template <class T, class U = CrtpBase>
class RunSingleTimeCommand : public U {
   protected:
    void runSingleTimeCommands(VkQueue queue, std::shared_ptr<android::base::Lock> queueLock,
                               std::function<void(const VkCommandBuffer& commandBuffer)> f) const {
        const T& self = static_cast<const T&>(*this);
        VkCommandBuffer cmdBuff;
        VkCommandBufferAllocateInfo cmdBuffAllocInfo = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .commandPool = self.m_vkCommandPool,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = 1};
        VK_CHECK(self.m_vk.vkAllocateCommandBuffers(self.m_vkDevice, &cmdBuffAllocInfo, &cmdBuff));
        VkCommandBufferBeginInfo beginInfo = {.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                                              .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT};
        VK_CHECK(self.m_vk.vkBeginCommandBuffer(cmdBuff, &beginInfo));
        f(cmdBuff);
        VK_CHECK(self.m_vk.vkEndCommandBuffer(cmdBuff));
        VkSubmitInfo submitInfo = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                   .commandBufferCount = 1,
                                   .pCommandBuffers = &cmdBuff};
        {
            std::unique_ptr<android::base::AutoLock> lock = nullptr;
            if (queueLock) {
                lock = std::make_unique<android::base::AutoLock>(*queueLock);
            }
            VK_CHECK(self.m_vk.vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
            VK_CHECK(self.m_vk.vkQueueWaitIdle(queue));
        }
        self.m_vk.vkFreeCommandBuffers(self.m_vkDevice, self.m_vkCommandPool, 1, &cmdBuff);
    }
};

template <class T, class U = CrtpBase>
class RecordImageLayoutTransformCommands : public U {
   protected:
    void recordImageLayoutTransformCommands(VkCommandBuffer cmdBuff, VkImage image,
                                            VkImageLayout oldLayout,
                                            VkImageLayout newLayout) const {
        const T& self = static_cast<const T&>(*this);
        VkImageMemoryBarrier imageBarrier = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .oldLayout = oldLayout,
            .newLayout = newLayout,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .image = image,
            .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                 .baseMipLevel = 0,
                                 .levelCount = 1,
                                 .baseArrayLayer = 0,
                                 .layerCount = 1}};
        self.m_vk.vkCmdPipelineBarrier(cmdBuff, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                       VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
                                       nullptr, 1, &imageBarrier);
    }
};
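
// These mixins fetch their dependencies from the derived class, so the
// derived class must expose the members they use: FindMemoryType reads m_vk
// and m_vkPhysicalDevice; RunSingleTimeCommand additionally reads m_vkDevice
// and m_vkCommandPool. A minimal sketch (the class and its members are
// hypothetical; only the mixin names come from this header):
//
//    class ImageUploader
//        : public vk_util::MultiCrtp<ImageUploader,
//                                    vk_util::FindMemoryType,
//                                    vk_util::RunSingleTimeCommand,
//                                    vk_util::RecordImageLayoutTransformCommands> {
//       public:
//        VulkanDispatch m_vk;
//        VkPhysicalDevice m_vkPhysicalDevice;
//        VkDevice m_vkDevice;
//        VkCommandPool m_vkCommandPool;
//    };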

template <class T>
typename vk_fn_info::GetVkFnInfo<T>::type getVkInstanceProcAddrWithFallback(
    const std::vector<std::function<std::remove_pointer_t<PFN_vkGetInstanceProcAddr>>>&
        vkGetInstanceProcAddrs,
    VkInstance instance) {
    for (const auto& vkGetInstanceProcAddr : vkGetInstanceProcAddrs) {
        if (!vkGetInstanceProcAddr) {
            continue;
        }
        PFN_vkVoidFunction resWithCurrentVkGetInstanceProcAddr = std::apply(
            [&vkGetInstanceProcAddr, instance](auto&&... names) -> PFN_vkVoidFunction {
                for (const char* name : {names...}) {
                    if (PFN_vkVoidFunction resWithCurrentName =
                            vkGetInstanceProcAddr(instance, name)) {
                        return resWithCurrentName;
                    }
                }
                return nullptr;
            },
            vk_fn_info::GetVkFnInfo<T>::names);
        if (resWithCurrentVkGetInstanceProcAddr) {
            return reinterpret_cast<typename vk_fn_info::GetVkFnInfo<T>::type>(
                resWithCurrentVkGetInstanceProcAddr);
        }
    }
    return nullptr;
}
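
// Example (sketch, assuming vk_fn_info.h registers GetPhysicalDeviceProperties2
// with its "vkGetPhysicalDeviceProperties2" / "vkGetPhysicalDeviceProperties2KHR"
// aliases): resolve a function through the first loader that knows any of its
// registered names, falling back to the next loader otherwise.
//
//    auto getPhysDevProps2 =
//        getVkInstanceProcAddrWithFallback<vk_fn_info::GetPhysicalDeviceProperties2>(
//            {primaryGetInstanceProcAddr, fallbackGetInstanceProcAddr}, instance);
//    if (getPhysDevProps2) { /* safe to call */ }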

static inline bool vk_descriptor_type_has_image_view(VkDescriptorType type) {
    switch (type) {
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            return true;
        default:
            return false;
    }
}

}  // namespace vk_util
}  // namespace vk
}  // namespace gfxstream

#endif /* VK_UTIL_H */