/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#pragma once
#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <string.h>
#include <assert.h>
#include <stdint.h>

#include "util/macros.h"
#include "util/list.h"
#include "util/u_dynarray.h"
#include "util/simple_mtx.h"
#include "util/u_queue.h"

#include "compiler/shader_enums.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"
#include "cso_cache/cso_context.h"
#include "nir.h"

/* Forward declarations needed for the WSI entrypoints */
struct wl_surface;
struct wl_display;
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#define VK_PROTOTYPES
#include <vulkan/vulkan.h>
#include <vulkan/vk_icd.h>

#include "lvp_entrypoints.h"
#include "vk_device.h"
#include "vk_instance.h"
#include "vk_image.h"
#include "vk_log.h"
#include "vk_physical_device.h"
#include "vk_shader_module.h"
#include "vk_util.h"
#include "vk_format.h"
#include "vk_cmd_queue.h"
#include "vk_command_buffer.h"
#include "vk_queue.h"

#include "wsi_common.h"

#ifdef __cplusplus
extern "C" {
#endif

#define MAX_SETS 8
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_PUSH_DESCRIPTORS 32

#ifdef _WIN32
#define lvp_printflike(a, b)
#else
#define lvp_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
#endif

int lvp_get_instance_entrypoint_index(const char *name);
int lvp_get_device_entrypoint_index(const char *name);
int lvp_get_physical_device_entrypoint_index(const char *name);

const char *lvp_get_instance_entry_name(int index);
const char *lvp_get_physical_device_entry_name(int index);
const char *lvp_get_device_entry_name(int index);

bool lvp_instance_entrypoint_is_enabled(int index, uint32_t core_version,
                                        const struct vk_instance_extension_table *instance);
bool lvp_physical_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                               const struct vk_instance_extension_table *instance);
bool lvp_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                      const struct vk_instance_extension_table *instance,
                                      const struct vk_device_extension_table *device);

#define LVP_DEBUG_ALL_ENTRYPOINTS (1 << 0)

void __lvp_finishme(const char *file, int line, const char *format, ...)
   lvp_printflike(3, 4);

#define lvp_finishme(format, ...) \
   __lvp_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);

#define stub_return(v) \
   do { \
      lvp_finishme("stub %s", __func__); \
      return (v); \
   } while (0)

#define stub() \
   do { \
      lvp_finishme("stub %s", __func__); \
      return; \
   } while (0)
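
/* Usage sketch (illustrative, not part of the driver): an unimplemented
 * entrypoint can log a "FINISHME" message and bail out with the stub helpers,
 * e.g.
 *
 *    VKAPI_ATTR VkResult VKAPI_CALL
 *    lvp_SomeUnimplementedEntrypoint(VkDevice _device)
 *    {
 *       stub_return(VK_ERROR_FEATURE_NOT_PRESENT);
 *    }
 *
 * lvp_SomeUnimplementedEntrypoint is a made-up name; both macros expand to a
 * __lvp_finishme() call carrying the current file/line, followed by a return.
 */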

#define LVP_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define lvp_foreach_stage(stage, stage_bits) \
   for (gl_shader_stage stage, \
        __tmp = (gl_shader_stage)((stage_bits) & LVP_STAGE_MASK); \
        stage = ffs(__tmp) - 1, __tmp; \
        __tmp &= ~(1 << (stage)))
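
/* Usage sketch (illustrative, not part of the driver): walk every shader
 * stage whose bit is set in a gl_shader_stage mask, e.g.
 *
 *    uint32_t stages = (1 << MESA_SHADER_VERTEX) | (1 << MESA_SHADER_FRAGMENT);
 *    lvp_foreach_stage(s, stages) {
 *       // s is MESA_SHADER_VERTEX on the first iteration, then MESA_SHADER_FRAGMENT
 *    }
 *
 * The loop clears one bit per iteration (lowest stage first, via ffs()) and
 * stops once the mask is empty.
 */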

struct lvp_physical_device {
   struct vk_physical_device vk;

   struct pipe_loader_device *pld;
   struct pipe_screen *pscreen;
   uint32_t max_images;

   struct wsi_device wsi_device;
};

struct lvp_instance {
   struct vk_instance vk;

   uint32_t apiVersion;
   int physicalDeviceCount;
   struct lvp_physical_device physicalDevice;

   uint64_t debug_flags;

   struct pipe_loader_device *devs;
   int num_devices;
};

VkResult lvp_init_wsi(struct lvp_physical_device *physical_device);
void lvp_finish_wsi(struct lvp_physical_device *physical_device);

bool lvp_physical_device_extension_supported(struct lvp_physical_device *dev,
                                             const char *name);

struct lvp_queue {
   struct vk_queue vk;
   struct lvp_device *device;
   struct pipe_context *ctx;
   struct cso_context *cso;
   bool shutdown;
   uint64_t timeline;
   struct util_queue queue;
   simple_mtx_t last_lock;
   uint64_t last_finished;
   uint64_t last_fence_timeline;
   struct pipe_fence_handle *last_fence;
   volatile int count;
};

struct lvp_semaphore_wait {
   struct lvp_semaphore *sema;
   uint64_t wait;
};

struct lvp_queue_work {
   struct list_head list;
   uint32_t cmd_buffer_count;
   uint32_t timeline_count;
   uint32_t wait_count;
   uint64_t timeline;
   struct lvp_fence *fence;
   struct lvp_cmd_buffer **cmd_buffers;
   struct lvp_semaphore_timeline **timelines;
   VkSemaphore *waits;
   uint64_t *wait_vals;
};

struct lvp_pipeline_cache {
   struct vk_object_base base;
   struct lvp_device *device;
   VkAllocationCallbacks alloc;
};

struct lvp_device {
   struct vk_device vk;

   struct lvp_queue queue;
   struct lvp_instance *instance;
   struct lvp_physical_device *physical_device;
   struct pipe_screen *pscreen;
};

void lvp_device_get_cache_uuid(void *uuid);

enum lvp_device_memory_type {
   LVP_DEVICE_MEMORY_TYPE_DEFAULT,
   LVP_DEVICE_MEMORY_TYPE_USER_PTR,
   LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD,
};

struct lvp_device_memory {
   struct vk_object_base base;
   struct pipe_memory_allocation *pmem;
   uint32_t type_index;
   VkDeviceSize map_size;
   void *map;
   enum lvp_device_memory_type memory_type;
   int backed_fd;
};

struct lvp_image {
   struct vk_image vk;
   VkDeviceSize size;
   uint32_t alignment;
   struct pipe_memory_allocation *pmem;
   unsigned memory_offset;
   struct pipe_resource *bo;
};

static inline uint32_t
lvp_get_layerCount(const struct lvp_image *image,
                   const VkImageSubresourceRange *range)
{
   return range->layerCount == VK_REMAINING_ARRAY_LAYERS ?
      image->bo->array_size - range->baseArrayLayer : range->layerCount;
}

static inline uint32_t
lvp_get_levelCount(const struct lvp_image *image,
                   const VkImageSubresourceRange *range)
{
   return range->levelCount == VK_REMAINING_MIP_LEVELS ?
      (image->bo->last_level + 1) - range->baseMipLevel : range->levelCount;
}
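
/* Usage sketch (illustrative, not part of the driver): resolve the
 * VK_REMAINING_* sentinels of a VkImageSubresourceRange into concrete counts
 * before iterating, e.g.
 *
 *    uint32_t layer_count = lvp_get_layerCount(image, &range);
 *    uint32_t level_count = lvp_get_levelCount(image, &range);
 *    for (uint32_t l = 0; l < level_count; l++)
 *       // operate on mip level range.baseMipLevel + l
 */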

struct lvp_image_view {
   struct vk_object_base base;
   const struct lvp_image *image; /**< VkImageViewCreateInfo::image */

   VkImageViewType view_type;
   VkFormat format;
   enum pipe_format pformat;
   VkComponentMapping components;
   VkImageSubresourceRange subresourceRange;

   struct pipe_surface *surface; /* have we created a pipe surface for this? */
};

struct lvp_subpass_attachment {
   uint32_t attachment;
   VkImageLayout layout;
   bool in_render_loop;
};

struct lvp_subpass {
   uint32_t attachment_count;
   struct lvp_subpass_attachment *attachments;

   uint32_t input_count;
   uint32_t color_count;
   struct lvp_subpass_attachment *input_attachments;
   struct lvp_subpass_attachment *color_attachments;
   struct lvp_subpass_attachment *resolve_attachments;
   struct lvp_subpass_attachment *depth_stencil_attachment;
   struct lvp_subpass_attachment *ds_resolve_attachment;
   VkResolveModeFlagBits depth_resolve_mode;
   VkResolveModeFlagBits stencil_resolve_mode;

   /** Subpass has at least one color resolve attachment */
   bool has_color_resolve;

   /** Subpass has at least one color attachment */
   bool has_color_att;

   VkSampleCountFlagBits max_sample_count;

   uint32_t view_mask;
};

struct lvp_render_pass_attachment {
   VkFormat format;
   uint32_t samples;
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
   VkImageLayout initial_layout;
   VkImageLayout final_layout;

   /* The subpass id in which the attachment will be used first/last. */
   uint32_t first_subpass_idx;
   uint32_t last_subpass_idx;
};

struct lvp_render_pass {
   struct vk_object_base base;
   uint32_t attachment_count;
   uint32_t subpass_count;
   struct lvp_subpass_attachment *subpass_attachments;
   struct lvp_render_pass_attachment *attachments;
   bool has_color_attachment;
   bool has_zs_attachment;
   struct lvp_subpass subpasses[0];
};

struct lvp_sampler {
   struct vk_object_base base;
   VkSamplerCreateInfo create_info;
   union pipe_color_union border_color;
   VkSamplerReductionMode reduction_mode;
   uint32_t state[4];
};

struct lvp_framebuffer {
   struct vk_object_base base;
   uint32_t width;
   uint32_t height;
   uint32_t layers;

   bool imageless;
   uint32_t attachment_count;
   struct lvp_image_view *attachments[0];
};

struct lvp_descriptor_set_binding_layout {
   uint16_t descriptor_index;
   VkDescriptorType type;
   /* Number of array elements in this binding */
   uint16_t array_size;
   bool valid;

   int16_t dynamic_index;
   struct {
      int16_t const_buffer_index;
      int16_t shader_buffer_index;
      int16_t sampler_index;
      int16_t sampler_view_index;
      int16_t image_index;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct lvp_sampler **immutable_samplers;
};

struct lvp_descriptor_set_layout {
   struct vk_object_base base;

   const VkAllocationCallbacks *alloc;
   /* Descriptor set layouts can be destroyed at almost any time */
   uint32_t ref_cnt;

   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   struct {
      uint16_t const_buffer_count;
      uint16_t shader_buffer_count;
      uint16_t sampler_count;
      uint16_t sampler_view_count;
      uint16_t image_count;
   } stage[MESA_SHADER_STAGES];

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set */
   struct lvp_descriptor_set_binding_layout binding[0];
};

void lvp_descriptor_set_layout_destroy(struct lvp_device *device,
                                       struct lvp_descriptor_set_layout *layout);

static inline void
lvp_descriptor_set_layout_ref(struct lvp_descriptor_set_layout *layout)
{
   assert(layout && layout->ref_cnt >= 1);
   p_atomic_inc(&layout->ref_cnt);
}

static inline void
lvp_descriptor_set_layout_unref(struct lvp_device *device,
                                struct lvp_descriptor_set_layout *layout)
{
   assert(layout && layout->ref_cnt >= 1);
   if (p_atomic_dec_zero(&layout->ref_cnt))
      lvp_descriptor_set_layout_destroy(device, layout);
}
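
/* Usage sketch (illustrative, not part of the driver): anything that keeps a
 * pointer to a set layout (a pipeline layout, a descriptor set, ...) takes a
 * reference while it holds the pointer and drops it when it is destroyed:
 *
 *    lvp_descriptor_set_layout_ref(layout);
 *    ...
 *    lvp_descriptor_set_layout_unref(device, layout);
 *
 * The final unref frees the layout, which is why it can safely outlive the
 * application's vkDestroyDescriptorSetLayout() call.
 */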

union lvp_descriptor_info {
   struct {
      struct lvp_sampler *sampler;
      struct lvp_image_view *iview;
      VkImageLayout image_layout;
   };
   struct {
      struct lvp_buffer *buffer;
      VkDeviceSize offset;
      VkDeviceSize range;
   };
   struct lvp_buffer_view *buffer_view;
};

struct lvp_descriptor {
   VkDescriptorType type;

   union lvp_descriptor_info info;
};

struct lvp_descriptor_set {
   struct vk_object_base base;
   struct lvp_descriptor_set_layout *layout;
   struct list_head link;
   struct lvp_descriptor descriptors[0];
};

struct lvp_descriptor_pool {
   struct vk_object_base base;
   VkDescriptorPoolCreateFlags flags;
   uint32_t max_sets;

   struct list_head sets;
};

struct lvp_descriptor_update_template {
   struct vk_object_base base;
   uint32_t entry_count;
   uint32_t set;
   VkDescriptorUpdateTemplateType type;
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *pipeline_layout;
   VkDescriptorUpdateTemplateEntry entry[0];
};

VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set);

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set);

struct lvp_pipeline_layout {
   struct vk_object_base base;
   struct {
      struct lvp_descriptor_set_layout *layout;
      uint32_t dynamic_offset_start;
   } set[MAX_SETS];

   uint32_t num_sets;
   uint32_t push_constant_size;
   struct {
      bool has_dynamic_offsets;
   } stage[MESA_SHADER_STAGES];
};

struct lvp_pipeline {
   struct vk_object_base base;
   struct lvp_device *device;
   struct lvp_pipeline_layout *layout;

   void *mem_ctx;
   bool is_compute_pipeline;
   bool force_min_sample;
   nir_shader *pipeline_nir[MESA_SHADER_STAGES];
   void *shader_cso[PIPE_SHADER_TYPES];
   VkGraphicsPipelineCreateInfo graphics_create_info;
   VkComputePipelineCreateInfo compute_create_info;
   uint32_t line_stipple_factor;
   uint16_t line_stipple_pattern;
   bool line_stipple_enable;
   bool line_smooth;
   bool disable_multisample;
   bool line_rectangular;
   bool gs_output_lines;
   bool provoking_vertex_last;
};

struct lvp_event {
   struct vk_object_base base;
   volatile uint64_t event_storage;
};

struct lvp_fence {
   struct vk_object_base base;
   uint64_t timeline;
   struct util_queue_fence fence;
   struct pipe_fence_handle *handle;
   bool signalled;
};

struct lvp_semaphore_timeline {
   struct lvp_semaphore_timeline *next;
   uint64_t signal; /* api */
   uint64_t timeline; /* queue */
   struct pipe_fence_handle *fence;
};

struct lvp_semaphore {
   struct vk_object_base base;
   bool is_timeline;
   uint64_t current;
   simple_mtx_t lock;
   mtx_t submit_lock;
   cnd_t submit;
   void *mem;
   struct util_dynarray links;
   struct lvp_semaphore_timeline *timeline;
   struct lvp_semaphore_timeline *latest;
};

struct lvp_buffer {
   struct vk_object_base base;

   VkDeviceSize size;

   VkBufferUsageFlags usage;
   VkDeviceSize offset;

   struct pipe_memory_allocation *pmem;
   struct pipe_resource *bo;
   uint64_t total_size;
};

struct lvp_buffer_view {
   struct vk_object_base base;
   VkFormat format;
   enum pipe_format pformat;
   struct lvp_buffer *buffer;
   uint32_t offset;
   uint64_t range;
};

struct lvp_query_pool {
   struct vk_object_base base;
   VkQueryType type;
   uint32_t count;
   VkQueryPipelineStatisticFlags pipeline_stats;
   enum pipe_query_type base_type;
   struct pipe_query *queries[0];
};

struct lvp_cmd_pool {
   struct vk_object_base base;
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
   struct list_head free_cmd_buffers;
};

enum lvp_cmd_buffer_status {
   LVP_CMD_BUFFER_STATUS_INVALID,
   LVP_CMD_BUFFER_STATUS_INITIAL,
   LVP_CMD_BUFFER_STATUS_RECORDING,
   LVP_CMD_BUFFER_STATUS_EXECUTABLE,
   LVP_CMD_BUFFER_STATUS_PENDING,
};

struct lvp_cmd_buffer {
   struct vk_command_buffer vk;

   struct lvp_device *device;

   VkCommandBufferLevel level;
   enum lvp_cmd_buffer_status status;
   struct lvp_cmd_pool *pool;
   struct list_head pool_link;

   struct vk_cmd_queue queue;

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
};

#define LVP_FROM_HANDLE(__lvp_type, __name, __handle) \
   struct __lvp_type *__name = __lvp_type ## _from_handle(__handle)
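
/* Usage sketch (illustrative, not part of the driver): entrypoints use this
 * to recover the driver-private struct behind a Vulkan handle, e.g.
 *
 *    VKAPI_ATTR void VKAPI_CALL lvp_DoSomething(VkDevice _device)
 *    {
 *       LVP_FROM_HANDLE(lvp_device, device, _device);
 *       // device now points at the struct lvp_device
 *    }
 *
 * lvp_DoSomething is a made-up name; the lvp_*_from_handle() casts this macro
 * expands to are generated by the VK_DEFINE_*_HANDLE_CASTS macros below.
 */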

VK_DEFINE_HANDLE_CASTS(lvp_cmd_buffer, vk.base, VkCommandBuffer,
                       VK_OBJECT_TYPE_COMMAND_BUFFER)
VK_DEFINE_HANDLE_CASTS(lvp_device, vk.base, VkDevice, VK_OBJECT_TYPE_DEVICE)
VK_DEFINE_HANDLE_CASTS(lvp_instance, vk.base, VkInstance, VK_OBJECT_TYPE_INSTANCE)
VK_DEFINE_HANDLE_CASTS(lvp_physical_device, vk.base, VkPhysicalDevice,
                       VK_OBJECT_TYPE_PHYSICAL_DEVICE)
VK_DEFINE_HANDLE_CASTS(lvp_queue, vk.base, VkQueue, VK_OBJECT_TYPE_QUEUE)

VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_cmd_pool, base, VkCommandPool,
                               VK_OBJECT_TYPE_COMMAND_POOL)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer, base, VkBuffer,
                               VK_OBJECT_TYPE_BUFFER)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer_view, base, VkBufferView,
                               VK_OBJECT_TYPE_BUFFER_VIEW)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_pool, base, VkDescriptorPool,
                               VK_OBJECT_TYPE_DESCRIPTOR_POOL)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set, base, VkDescriptorSet,
                               VK_OBJECT_TYPE_DESCRIPTOR_SET)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set_layout, base, VkDescriptorSetLayout,
                               VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_update_template, base, VkDescriptorUpdateTemplate,
                               VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_device_memory, base, VkDeviceMemory,
                               VK_OBJECT_TYPE_DEVICE_MEMORY)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_event, base, VkEvent, VK_OBJECT_TYPE_EVENT)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_framebuffer, base, VkFramebuffer,
                               VK_OBJECT_TYPE_FRAMEBUFFER)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_image, vk.base, VkImage, VK_OBJECT_TYPE_IMAGE)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_image_view, base, VkImageView,
                               VK_OBJECT_TYPE_IMAGE_VIEW)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_cache, base, VkPipelineCache,
                               VK_OBJECT_TYPE_PIPELINE_CACHE)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline, base, VkPipeline,
                               VK_OBJECT_TYPE_PIPELINE)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_layout, base, VkPipelineLayout,
                               VK_OBJECT_TYPE_PIPELINE_LAYOUT)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_query_pool, base, VkQueryPool,
                               VK_OBJECT_TYPE_QUERY_POOL)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_render_pass, base, VkRenderPass,
                               VK_OBJECT_TYPE_RENDER_PASS)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_sampler, base, VkSampler,
                               VK_OBJECT_TYPE_SAMPLER)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_fence, base, VkFence, VK_OBJECT_TYPE_FENCE)
VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_semaphore, base, VkSemaphore,
                               VK_OBJECT_TYPE_SEMAPHORE)

struct lvp_attachment_state {
   VkImageAspectFlags pending_clear_aspects;
   VkClearValue clear_value;
};

struct lvp_write_descriptor {
   uint32_t dst_binding;
   uint32_t dst_array_element;
   uint32_t descriptor_count;
   VkDescriptorType descriptor_type;
};

struct lvp_cmd_push_descriptor_set {
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *layout;
   uint32_t set;
   uint32_t descriptor_write_count;
   struct lvp_write_descriptor *descriptors;
   union lvp_descriptor_info *infos;
};

VkResult lvp_execute_cmds(struct lvp_device *device,
                          struct lvp_queue *queue,
                          struct lvp_cmd_buffer *cmd_buffer);

struct lvp_image *lvp_swapchain_get_image(VkSwapchainKHR swapchain,
                                          uint32_t index);

static inline enum pipe_format
lvp_vk_format_to_pipe_format(VkFormat format)
{
   /* Some formats cause problems with CTS right now. */
   if (format == VK_FORMAT_R4G4B4A4_UNORM_PACK16 ||
       format == VK_FORMAT_R5G5B5A1_UNORM_PACK16 ||
       format == VK_FORMAT_R8_SRGB ||
       format == VK_FORMAT_R8G8_SRGB ||
       format == VK_FORMAT_R64G64B64A64_SFLOAT ||
       format == VK_FORMAT_R64_SFLOAT ||
       format == VK_FORMAT_R64G64_SFLOAT ||
       format == VK_FORMAT_R64G64B64_SFLOAT ||
       format == VK_FORMAT_A2R10G10B10_SINT_PACK32 ||
       format == VK_FORMAT_A2B10G10R10_SINT_PACK32 ||
       format == VK_FORMAT_G8B8G8R8_422_UNORM ||
       format == VK_FORMAT_B8G8R8G8_422_UNORM ||
       format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
       format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
       format == VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM ||
       format == VK_FORMAT_G8_B8R8_2PLANE_422_UNORM ||
       format == VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM ||
       format == VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM ||
       format == VK_FORMAT_G16_B16R16_2PLANE_420_UNORM ||
       format == VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM ||
       format == VK_FORMAT_G16_B16R16_2PLANE_422_UNORM ||
       format == VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM ||
       format == VK_FORMAT_D16_UNORM_S8_UINT)
      return PIPE_FORMAT_NONE;

   return vk_format_to_pipe_format(format);
}
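
/* Usage sketch (illustrative, not part of the driver): translate the Vulkan
 * format of a view into the gallium format the driver actually programs, e.g.
 *
 *    iview->pformat = lvp_vk_format_to_pipe_format(pCreateInfo->format);
 *
 * A PIPE_FORMAT_NONE result means the format is deliberately not exposed.
 */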

void
queue_thread_noop(void *data, void *gdata, int thread_index);

#ifdef __cplusplus
}
#endif