/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#ifndef ZINK_SCREEN_H
#define ZINK_SCREEN_H

#include "zink_device_info.h"
#include "zink_instance.h"
#include "vk_dispatch_table.h"

#include "util/u_idalloc.h"
#include "pipe/p_screen.h"
#include "util/slab.h"
#include "compiler/nir/nir.h"
#include "util/disk_cache.h"
#include "util/log.h"
#include "util/simple_mtx.h"
#include "util/u_queue.h"
#include "util/u_live_shader_cache.h"
#include "util/u_vertex_state_cache.h"
#include "pipebuffer/pb_cache.h"
#include "pipebuffer/pb_slab.h"

#include <vulkan/vulkan.h>


#ifdef __cplusplus
extern "C" {
#endif

extern uint32_t zink_debug;
struct hash_table;
struct util_dl_library;

struct zink_batch_state;
struct zink_context;
struct zink_descriptor_layout_key;
struct zink_program;
struct zink_shader;
enum zink_descriptor_type;

/* this is the spec minimum */
#define ZINK_SPARSE_BUFFER_PAGE_SIZE (64 * 1024)

enum zink_debug {
   ZINK_DEBUG_NIR = (1<<0),
   ZINK_DEBUG_SPIRV = (1<<1),
   ZINK_DEBUG_TGSI = (1<<2),
   ZINK_DEBUG_VALIDATION = (1<<3),
   ZINK_DEBUG_SYNC = (1<<4),
   ZINK_DEBUG_COMPACT = (1<<5),
   ZINK_DEBUG_NOREORDER = (1<<6),
};
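/* Usage sketch: these are bitmask flags tested against the global zink_debug
 * word, e.g.
 *
 *    if (zink_debug & ZINK_DEBUG_VALIDATION)
 *       enable_extra_validation_logging();   // hypothetical helper
 */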

#define NUM_SLAB_ALLOCATORS 3
#define MIN_SLAB_ORDER 8

#define ZINK_CONTEXT_COPY_ONLY (1<<30)

enum zink_descriptor_mode {
   ZINK_DESCRIPTOR_MODE_AUTO,
   ZINK_DESCRIPTOR_MODE_LAZY,
   ZINK_DESCRIPTOR_MODE_CACHED,
   ZINK_DESCRIPTOR_MODE_NOTEMPLATES,
   ZINK_DESCRIPTOR_MODE_COMPACT,
};

extern enum zink_descriptor_mode zink_descriptor_mode;

//keep in sync with zink_descriptor_type since headers can't be cross-included
#define ZINK_MAX_DESCRIPTOR_SETS 6

struct zink_modifier_prop {
    uint32_t                             drmFormatModifierCount;
    VkDrmFormatModifierPropertiesEXT*    pDrmFormatModifierProperties;
};

struct zink_screen {
   struct pipe_screen base;

   struct util_dl_library *loader_lib;
   PFN_vkGetInstanceProcAddr vk_GetInstanceProcAddr;
   PFN_vkGetDeviceProcAddr vk_GetDeviceProcAddr;

   bool threaded;
   bool is_cpu;
   bool abort_on_hang;
   uint64_t curr_batch; //the current batch id
   uint32_t last_finished;
   VkSemaphore sem;
   VkFence fence;
   struct util_queue flush_queue;
   struct zink_context *copy_context;

   unsigned buffer_rebind_counter;
   unsigned image_rebind_counter;
   unsigned robust_ctx_count;

   struct hash_table dts;
   simple_mtx_t dt_lock;

   bool device_lost;
   int drm_fd;

   struct hash_table framebuffer_cache;

   struct slab_parent_pool transfer_pool;
   struct disk_cache *disk_cache;
   struct util_queue cache_put_thread;
   struct util_queue cache_get_thread;

   struct util_live_shader_cache shaders;

   struct {
      struct pb_cache bo_cache;
      struct pb_slabs bo_slabs[NUM_SLAB_ALLOCATORS];
      unsigned min_alloc_size;
      uint32_t next_bo_unique_id;
   } pb;
   uint8_t heap_map[VK_MAX_MEMORY_TYPES];
   VkMemoryPropertyFlags heap_flags[VK_MAX_MEMORY_TYPES];
   bool resizable_bar;

   uint64_t total_video_mem;
   uint64_t clamp_video_mem;
   uint64_t total_mem;

   VkInstance instance;
   struct zink_instance_info instance_info;

   VkPhysicalDevice pdev;
   uint32_t vk_version, spirv_version;
   struct util_idalloc_mt buffer_ids;
   struct util_vertex_state_cache vertex_state_cache;

   struct zink_device_info info;
   struct nir_shader_compiler_options nir_options;

   bool have_X8_D24_UNORM_PACK32;
   bool have_D24_UNORM_S8_UINT;
   bool have_D32_SFLOAT_S8_UINT;
   bool have_triangle_fans;
   bool need_2D_zs;
   bool need_2D_sparse;
   bool faked_e5sparse; //drivers may not expose R9G9B9E5 but cts requires it

   uint32_t gfx_queue;
   uint32_t sparse_queue;
   uint32_t max_queues;
   uint32_t timestamp_valid_bits;
   VkDevice dev;
   VkQueue queue; //gfx+compute
   VkQueue queue_sparse;
   simple_mtx_t queue_lock;
   VkDebugUtilsMessengerEXT debugUtilsCallbackHandle;

   uint32_t cur_custom_border_color_samplers;

   struct vk_dispatch_table vk;

   bool compact_descriptors;
   uint8_t desc_set_id[ZINK_MAX_DESCRIPTOR_SETS];
   bool (*descriptor_program_init)(struct zink_context *ctx, struct zink_program *pg);
   void (*descriptor_program_deinit)(struct zink_context *ctx, struct zink_program *pg);
   void (*descriptors_update)(struct zink_context *ctx, bool is_compute);
   void (*context_update_descriptor_states)(struct zink_context *ctx, bool is_compute);
   void (*context_invalidate_descriptor_state)(struct zink_context *ctx, enum pipe_shader_type shader,
                                               enum zink_descriptor_type type,
                                               unsigned start, unsigned count);
   bool (*batch_descriptor_init)(struct zink_screen *screen, struct zink_batch_state *bs);
   void (*batch_descriptor_reset)(struct zink_screen *screen, struct zink_batch_state *bs);
   void (*batch_descriptor_deinit)(struct zink_screen *screen, struct zink_batch_state *bs);
   bool (*descriptors_init)(struct zink_context *ctx);
   void (*descriptors_deinit)(struct zink_context *ctx);

   struct {
      bool dual_color_blend_by_location;
      bool inline_uniforms;
   } driconf;

   VkFormatProperties format_props[PIPE_FORMAT_COUNT];
   struct zink_modifier_prop modifier_props[PIPE_FORMAT_COUNT];
   struct {
      uint32_t image_view;
      uint32_t buffer_view;
   } null_descriptor_hashes;

   VkExtent2D maxSampleLocationGridSize[5];

   struct {
      bool color_write_missing;
      bool depth_clip_control_missing;
      bool implicit_sync;
      unsigned z16_unscaled_bias;
      unsigned z24_unscaled_bias;
   } driver_workarounds;
};

/* update last_finished to account for batch_id wrapping */
static inline void
zink_screen_update_last_finished(struct zink_screen *screen, uint64_t batch_id)
{
   const uint32_t check_id = (uint32_t)batch_id;
   /* last_finished may have wrapped */
   if (screen->last_finished < UINT_MAX / 2) {
      /* last_finished has wrapped, batch_id has not */
      if (check_id > UINT_MAX / 2)
         return;
   } else if (check_id < UINT_MAX / 2) {
      /* batch_id has wrapped, last_finished has not */
      screen->last_finished = check_id;
      return;
   }
   /* neither have wrapped */
   screen->last_finished = MAX2(check_id, screen->last_finished);
}

/* check a batch_id against last_finished while accounting for wrapping */
static inline bool
zink_screen_check_last_finished(struct zink_screen *screen, uint32_t batch_id)
{
   const uint32_t check_id = (uint32_t)batch_id;
   /* last_finished may have wrapped */
   if (screen->last_finished < UINT_MAX / 2) {
      /* last_finished has wrapped, batch_id has not */
      if (check_id > UINT_MAX / 2)
         return true;
   } else if (check_id < UINT_MAX / 2) {
      /* batch_id has wrapped, last_finished has not */
      return false;
   }
   return screen->last_finished >= check_id;
}
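/* Usage sketch: the two wrap-aware helpers above are intended to be used
 * together so 32-bit batch ids compare correctly across wraparound, e.g.
 *
 *    zink_screen_update_last_finished(screen, completed_batch_id);
 *    ...
 *    if (zink_screen_check_last_finished(screen, (uint32_t)pending_batch_id))
 *       // the pending batch is known to have completed
 *
 * (completed_batch_id and pending_batch_id are illustrative names.)
 */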

bool
zink_screen_init_semaphore(struct zink_screen *screen);

static inline bool
zink_screen_handle_vkresult(struct zink_screen *screen, VkResult ret)
{
   bool success = false;
   switch (ret) {
   case VK_SUCCESS:
      success = true;
      break;
   case VK_ERROR_DEVICE_LOST:
      screen->device_lost = true;
      mesa_loge("zink: DEVICE LOST!\n");
      /* if nothing can save us, abort */
      if (screen->abort_on_hang && !screen->robust_ctx_count)
         abort();
      FALLTHROUGH;
   default:
      success = false;
      break;
   }
   return success;
}
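/* Usage sketch: wrap Vulkan calls whose failure should mark the device as
 * lost, e.g.
 *
 *    if (!zink_screen_handle_vkresult(screen, VKSCR(QueueSubmit)(screen->queue, 1, &si, fence)))
 *       // submission failed; screen->device_lost may now be set
 *
 * (si and fence are illustrative locals.)
 */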

static inline struct zink_screen *
zink_screen(struct pipe_screen *pipe)
{
   return (struct zink_screen *)pipe;
}


struct mem_cache_entry {
   VkDeviceMemory mem;
   void *map;
};

#define VKCTX(fn) zink_screen(ctx->base.screen)->vk.fn
#define VKSCR(fn) screen->vk.fn
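/* Usage sketch: VKCTX/VKSCR resolve a Vulkan entrypoint through the screen's
 * dispatch table and assume a local named `ctx` or `screen`, respectively, is
 * in scope, e.g.
 *
 *    VKCTX(CmdDraw)(cmdbuf, 3, 1, 0, 0);
 *    VKSCR(DestroyFence)(screen->dev, fence, NULL);
 *
 * (cmdbuf and fence are illustrative locals.)
 */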

VkFormat
zink_get_format(struct zink_screen *screen, enum pipe_format format);

bool
zink_screen_timeline_wait(struct zink_screen *screen, uint64_t batch_id, uint64_t timeout);

bool
zink_is_depth_format_supported(struct zink_screen *screen, VkFormat format);

#define GET_PROC_ADDR_INSTANCE_LOCAL(screen, instance, x) PFN_vk##x vk_##x = (PFN_vk##x)(screen)->vk_GetInstanceProcAddr(instance, "vk"#x)
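/* Usage sketch: declares and initializes a local instance-level entrypoint
 * pointer named vk_<x>, e.g.
 *
 *    GET_PROC_ADDR_INSTANCE_LOCAL(screen, screen->instance, GetPhysicalDeviceSurfaceSupportKHR);
 *    if (vk_GetPhysicalDeviceSurfaceSupportKHR)
 *       vk_GetPhysicalDeviceSurfaceSupportKHR(screen->pdev, queue_idx, surface, &supported);
 *
 * (queue_idx, surface, and supported are illustrative locals.)
 */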

void
zink_screen_update_pipeline_cache(struct zink_screen *screen, struct zink_program *pg);

void
zink_screen_get_pipeline_cache(struct zink_screen *screen, struct zink_program *pg);

void
zink_screen_init_descriptor_funcs(struct zink_screen *screen, bool fallback);

void
zink_stub_function_not_loaded(void);

#define warn_missing_feature(warned, feat) \
   do { \
      if (!warned) { \
         mesa_logw("WARNING: Incorrect rendering will happen " \
                         "because the Vulkan device doesn't support " \
                         "the '%s' feature\n", feat); \
         warned = true; \
      } \
   } while (0)
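/* Usage sketch: `warned` is a caller-provided flag so the warning is logged
 * only once per feature, e.g.
 *
 *    static bool warned_wide_lines = false;
 *    if (!screen->info.feats.features.wideLines)
 *       warn_missing_feature(warned_wide_lines, "wideLines");
 *
 * (the flag name and the feature field path are illustrative.)
 */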

#ifdef __cplusplus
}
#endif

#endif