/*
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: MIT
 */
5 
#ifndef VKR_COMMON_H
#define VKR_COMMON_H

#include "config.h"

#include <assert.h>
#include <errno.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "c11/threads.h"
#include "os/os_misc.h"
#include "os/os_thread.h"
#include "pipe/p_compiler.h"
#include "util/u_double_list.h"
#include "util/u_hash_table.h"
#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_pointer.h"
#include "venus-protocol/vulkan.h"
#include "virgl_util.h"
#include "virglrenderer.h"
#include "vrend_debug.h"

#include "vkr_renderer.h"
/*
 * TODO what extensions do we need from the host driver?
 *
 * We don't check vkGetPhysicalDeviceExternalBufferProperties, etc. yet.  Even
 * if we did, silently adding external memory info to vkCreateBuffer or
 * vkCreateImage could change the results of vkGetBufferMemoryRequirements or
 * vkGetImageMemoryRequirements and confuse the guest.
 */
#define FORCE_ENABLE_DMABUF

/* true when the given debug category bit is set in vkr_debug_flags */
#define VKR_DEBUG(category) (unlikely(vkr_debug_flags & VKR_DEBUG_##category))
47 
/* define a type-safe cast function
 *
 * Expands to vkr_<vkr_type>_from_handle(), which converts a Vulkan handle of
 * type vk_type back to its owning struct vkr_<vkr_type>.  In debug builds the
 * object's base type, id, and stored handle are sanity-checked; NULL handles
 * pass through unchanged.
 */
#define VKR_DEFINE_OBJECT_CAST(vkr_type, vk_enum, vk_type)                               \
   static inline struct vkr_##vkr_type *vkr_##vkr_type##_from_handle(vk_type handle)     \
   {                                                                                     \
      struct vkr_##vkr_type *obj = (struct vkr_##vkr_type *)(uintptr_t)handle;           \
      if (obj) {                                                                         \
         assert(obj->base.type == vk_enum);                                              \
         assert(obj->base.id);                                                           \
         assert(obj->base.handle.vkr_type);                                              \
         assert((uintptr_t)obj->base.handle.vkr_type == obj->base.handle.u64);           \
      }                                                                                  \
      return obj;                                                                        \
   }
61 
/* initializer for a struct vkr_region covering [offset, offset + size);
 * vkr_region_is_valid should be used to check for overflows
 */
#define VKR_REGION_INIT(offset, size)                                                    \
   {                                                                                     \
      .begin = (offset), .end = (offset) + (size)                                        \
   }
67 
/* forward declarations of the per-Vulkan-object vkr wrapper types */
struct vkr_context;
struct vkr_instance;
struct vkr_physical_device;
struct vkr_device;
struct vkr_queue;
struct vkr_fence;
struct vkr_semaphore;
struct vkr_event;
struct vkr_device_memory;
struct vkr_buffer;
struct vkr_buffer_view;
struct vkr_image;
struct vkr_image_view;
struct vkr_sampler;
struct vkr_sampler_ycbcr_conversion;
struct vkr_descriptor_set_layout;
struct vkr_descriptor_pool;
struct vkr_descriptor_set;
struct vkr_descriptor_update_template;
struct vkr_render_pass;
struct vkr_framebuffer;
struct vkr_query_pool;
struct vkr_shader_module;
struct vkr_pipeline_layout;
struct vkr_pipeline_cache;
struct vkr_pipeline;
struct vkr_command_pool;
struct vkr_command_buffer;

/* id identifying a vkr_object; stored in vkr_object::id */
typedef uint64_t vkr_object_id;
98 
/* debug categories checked via the VKR_DEBUG() macro */
enum vkr_debug_flags {
   VKR_DEBUG_VALIDATE = 1 << 0,
};
102 
103 /* base class for all objects */
104 struct vkr_object {
105    VkObjectType type;
106    vkr_object_id id;
107 
108    union {
109       uint64_t u64;
110 
111       VkInstance instance;
112       VkPhysicalDevice physical_device;
113       VkDevice device;
114       VkQueue queue;
115       VkCommandBuffer command_buffer;
116 
117       VkBuffer buffer;
118       VkImage image;
119       VkSemaphore semaphore;
120       VkFence fence;
121       VkDeviceMemory device_memory;
122       VkEvent event;
123       VkQueryPool query_pool;
124       VkBufferView buffer_view;
125       VkImageView image_view;
126       VkShaderModule shader_module;
127       VkPipelineCache pipeline_cache;
128       VkPipelineLayout pipeline_layout;
129       VkPipeline pipeline;
130       VkRenderPass render_pass;
131       VkDescriptorSetLayout descriptor_set_layout;
132       VkSampler sampler;
133       VkDescriptorSet descriptor_set;
134       VkDescriptorPool descriptor_pool;
135       VkFramebuffer framebuffer;
136       VkCommandPool command_pool;
137       VkSamplerYcbcrConversion sampler_ycbcr_conversion;
138       VkDescriptorUpdateTemplate descriptor_update_template;
139    } handle;
140 
141    struct list_head track_head;
142 };
143 
/* a growable-by-init array of objects plus backing storage for their handles;
 * see object_array_init/object_array_fini
 */
struct object_array {
   uint32_t count;
   void **objects;
   void *handle_storage;

   /* true if the ownership of the objects has been transferred (to
    * vkr_context::object_table)
    */
   bool objects_stolen;
};
154 
/* a half-open byte range [begin, end); see VKR_REGION_INIT and the
 * vkr_region_* helpers below
 */
struct vkr_region {
   size_t begin;
   size_t end;
};
159 
160 extern uint32_t vkr_renderer_flags;
161 extern uint32_t vkr_debug_flags;
162 
163 void
164 vkr_log(const char *fmt, ...);
165 
166 bool
167 object_array_init(struct vkr_context *ctx,
168                   struct object_array *arr,
169                   uint32_t count,
170                   VkObjectType obj_type,
171                   size_t obj_size,
172                   size_t handle_size,
173                   const void *handles);
174 
175 void
176 object_array_fini(struct object_array *arr);
177 
178 static inline void *
vkr_find_pnext(const void * chain,VkStructureType type)179 vkr_find_pnext(const void *chain, VkStructureType type)
180 {
181    VkBaseOutStructure *pnext = (VkBaseOutStructure *)chain;
182    while (pnext) {
183       if (pnext->sType == type)
184          return pnext;
185       pnext = pnext->pNext;
186    }
187    return NULL;
188 }
189 
190 static inline bool
vkr_is_recognized_object_type(VkObjectType type)191 vkr_is_recognized_object_type(VkObjectType type)
192 {
193    switch (type) {
194    /* VK_VERSION_1_0 */
195    case VK_OBJECT_TYPE_INSTANCE:
196    case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
197    case VK_OBJECT_TYPE_DEVICE:
198    case VK_OBJECT_TYPE_QUEUE:
199    case VK_OBJECT_TYPE_SEMAPHORE:
200    case VK_OBJECT_TYPE_COMMAND_BUFFER:
201    case VK_OBJECT_TYPE_FENCE:
202    case VK_OBJECT_TYPE_DEVICE_MEMORY:
203    case VK_OBJECT_TYPE_BUFFER:
204    case VK_OBJECT_TYPE_IMAGE:
205    case VK_OBJECT_TYPE_EVENT:
206    case VK_OBJECT_TYPE_QUERY_POOL:
207    case VK_OBJECT_TYPE_BUFFER_VIEW:
208    case VK_OBJECT_TYPE_IMAGE_VIEW:
209    case VK_OBJECT_TYPE_SHADER_MODULE:
210    case VK_OBJECT_TYPE_PIPELINE_CACHE:
211    case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
212    case VK_OBJECT_TYPE_RENDER_PASS:
213    case VK_OBJECT_TYPE_PIPELINE:
214    case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
215    case VK_OBJECT_TYPE_SAMPLER:
216    case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
217    case VK_OBJECT_TYPE_DESCRIPTOR_SET:
218    case VK_OBJECT_TYPE_FRAMEBUFFER:
219    case VK_OBJECT_TYPE_COMMAND_POOL:
220    /* VK_VERSION_1_1 */
221    case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
222    case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
223       return true;
224    default:
225       return false;
226    }
227 }
228 
229 static inline void *
vkr_object_alloc(size_t size,VkObjectType type,vkr_object_id id)230 vkr_object_alloc(size_t size, VkObjectType type, vkr_object_id id)
231 {
232    assert(size >= sizeof(struct vkr_object));
233    assert(vkr_is_recognized_object_type(type));
234 
235    struct vkr_object *obj = calloc(1, size);
236    if (!obj)
237       return NULL;
238 
239    /* obj is only half-initialized */
240    obj->type = type;
241    obj->id = id;
242 
243    return obj;
244 }
245 
246 static inline bool
vkr_region_is_valid(const struct vkr_region * region)247 vkr_region_is_valid(const struct vkr_region *region)
248 {
249    return region->begin <= region->end;
250 }
251 
252 static inline size_t
vkr_region_size(const struct vkr_region * region)253 vkr_region_size(const struct vkr_region *region)
254 {
255    return region->end - region->begin;
256 }
257 
258 static inline bool
vkr_region_is_aligned(const struct vkr_region * region,size_t align)259 vkr_region_is_aligned(const struct vkr_region *region, size_t align)
260 {
261    assert(align && util_is_power_of_two(align));
262    return !((region->begin | region->end) & (align - 1));
263 }
264 
265 static inline bool
vkr_region_is_disjoint(const struct vkr_region * region,const struct vkr_region * other)266 vkr_region_is_disjoint(const struct vkr_region *region, const struct vkr_region *other)
267 {
268    return region->begin >= other->end || region->end <= other->begin;
269 }
270 
271 static inline bool
vkr_region_is_within(const struct vkr_region * region,const struct vkr_region * other)272 vkr_region_is_within(const struct vkr_region *region, const struct vkr_region *other)
273 {
274    /* note that when region regresses to a point at other->end, both this
275     * function and vkr_region_is_disjoint return true
276     */
277    return region->begin >= other->begin && region->end <= other->end;
278 }
279 
280 static inline struct vkr_region
vkr_region_make_relative(const struct vkr_region * region)281 vkr_region_make_relative(const struct vkr_region *region)
282 {
283    return (struct vkr_region){
284       .end = region->end - region->begin,
285    };
286 }
287 
#endif /* VKR_COMMON_H */