/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#pragma once
#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <string.h>
#include <strings.h>
#include <pthread.h>
#include <assert.h>
#include <stdint.h>

#include "util/macros.h"
#include "util/list.h"

#include "compiler/shader_enums.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"
#include "nir.h"

/* Forward declarations needed for WSI entrypoints */
struct wl_surface;
struct wl_display;
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#define VK_PROTOTYPES
#include <vulkan/vulkan.h>
#include <vulkan/vk_icd.h>

#include "lvp_extensions.h"
#include "lvp_entrypoints.h"
#include "vk_object.h"

#include "wsi_common.h"

#include <assert.h>
#ifdef __cplusplus
extern "C" {
#endif

#define MAX_SETS         8
#define MAX_PUSH_CONSTANTS_SIZE 128

#define lvp_printflike(a, b) __attribute__((__format__(__printf__, a, b)))

#define typed_memcpy(dest, src, count) ({ \
   memcpy((dest), (src), (count) * sizeof(*(src))); \
})
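
/*
 * Illustrative note (editor sketch, not part of the driver): typed_memcpy()
 * copies `count` elements of the pointee type of `src`, so the element size
 * comes from the source pointer rather than a hand-written byte count.  The
 * variable names below are hypothetical.
 *
 *    VkViewport dst_viewports[16], src_viewports[16];
 *    typed_memcpy(dst_viewports, src_viewports, 4);   // copies 4 VkViewports
 */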

int lvp_get_instance_entrypoint_index(const char *name);
int lvp_get_device_entrypoint_index(const char *name);
int lvp_get_physical_device_entrypoint_index(const char *name);

const char *lvp_get_instance_entry_name(int index);
const char *lvp_get_physical_device_entry_name(int index);
const char *lvp_get_device_entry_name(int index);

bool lvp_instance_entrypoint_is_enabled(int index, uint32_t core_version,
                                        const struct lvp_instance_extension_table *instance);
bool lvp_physical_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                               const struct lvp_instance_extension_table *instance);
bool lvp_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                      const struct lvp_instance_extension_table *instance,
                                      const struct lvp_device_extension_table *device);

void *lvp_lookup_entrypoint(const char *name);

#define LVP_DEFINE_HANDLE_CASTS(__lvp_type, __VkType)                      \
                                                                           \
   static inline struct __lvp_type *                                       \
   __lvp_type ## _from_handle(__VkType _handle)                            \
   {                                                                       \
      return (struct __lvp_type *) _handle;                                \
   }                                                                       \
                                                                           \
   static inline __VkType                                                  \
   __lvp_type ## _to_handle(struct __lvp_type *_obj)                       \
   {                                                                       \
      return (__VkType) _obj;                                              \
   }

#define LVP_DEFINE_NONDISP_HANDLE_CASTS(__lvp_type, __VkType)              \
                                                                           \
   static inline struct __lvp_type *                                       \
   __lvp_type ## _from_handle(__VkType _handle)                            \
   {                                                                       \
      return (struct __lvp_type *)(uintptr_t) _handle;                     \
   }                                                                       \
                                                                           \
   static inline __VkType                                                  \
   __lvp_type ## _to_handle(struct __lvp_type *_obj)                       \
   {                                                                       \
      return (__VkType)(uintptr_t) _obj;                                   \
   }

#define LVP_FROM_HANDLE(__lvp_type, __name, __handle) \
   struct __lvp_type *__name = __lvp_type ## _from_handle(__handle)
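
/*
 * Illustrative note (editor sketch, not part of the driver): the cast macros
 * above generate <type>_from_handle()/<type>_to_handle() pairs, and
 * LVP_FROM_HANDLE() is the usual way to unwrap a Vulkan handle at the top of
 * an entrypoint.  The entrypoint below is hypothetical.
 *
 *    VKAPI_ATTR void VKAPI_CALL some_entrypoint(VkDevice _device, VkBuffer _buffer)
 *    {
 *       LVP_FROM_HANDLE(lvp_device, device, _device);
 *       LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
 *       // work with the driver-internal structs, then wrap results with
 *       // e.g. lvp_buffer_to_handle(buffer) when handing them back to the app
 *    }
 */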

LVP_DEFINE_HANDLE_CASTS(lvp_cmd_buffer, VkCommandBuffer)
LVP_DEFINE_HANDLE_CASTS(lvp_device, VkDevice)
LVP_DEFINE_HANDLE_CASTS(lvp_instance, VkInstance)
LVP_DEFINE_HANDLE_CASTS(lvp_physical_device, VkPhysicalDevice)
LVP_DEFINE_HANDLE_CASTS(lvp_queue, VkQueue)

LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_cmd_pool, VkCommandPool)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer, VkBuffer)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer_view, VkBufferView)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_pool, VkDescriptorPool)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set, VkDescriptorSet)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set_layout, VkDescriptorSetLayout)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_device_memory, VkDeviceMemory)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_event, VkEvent)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_framebuffer, VkFramebuffer)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_image, VkImage)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_image_view, VkImageView)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_cache, VkPipelineCache)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline, VkPipeline)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_layout, VkPipelineLayout)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_query_pool, VkQueryPool)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_render_pass, VkRenderPass)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_sampler, VkSampler)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_shader_module, VkShaderModule)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_fence, VkFence)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_semaphore, VkSemaphore)

/* Whenever we generate an error, pass it through this function.  It is
 * useful for debugging, where we can break on it.  Only call it at the error
 * site, not when propagating errors.  It might be useful to plug in a stack
 * trace here.
 */

VkResult __vk_errorf(struct lvp_instance *instance, VkResult error, const char *file, int line, const char *format, ...);

#define LVP_DEBUG_ALL_ENTRYPOINTS (1 << 0)

#define vk_error(instance, error) __vk_errorf(instance, error, __FILE__, __LINE__, NULL);
#define vk_errorf(instance, error, format, ...) __vk_errorf(instance, error, __FILE__, __LINE__, format, ## __VA_ARGS__);
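
/*
 * Illustrative note (editor sketch, not part of the driver): vk_error() and
 * vk_errorf() record where an error was first generated, which makes it easy
 * to set a breakpoint in __vk_errorf().  A typical (hypothetical) use:
 *
 *    if (!mem)
 *       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 */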

void __lvp_finishme(const char *file, int line, const char *format, ...)
   lvp_printflike(3, 4);

#define lvp_finishme(format, ...) \
   __lvp_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);

#define stub_return(v) \
   do { \
      lvp_finishme("stub %s", __func__); \
      return (v); \
   } while (0)

#define stub() \
   do { \
      lvp_finishme("stub %s", __func__); \
      return; \
   } while (0)
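
/*
 * Illustrative note (editor sketch, not part of the driver): unimplemented
 * entrypoints can print a FINISHME message and bail out with stub() or
 * stub_return().  The entrypoint name below is hypothetical.
 *
 *    VKAPI_ATTR VkResult VKAPI_CALL lvp_SomeUnimplementedEntrypoint(VkDevice device)
 *    {
 *       stub_return(VK_ERROR_FEATURE_NOT_PRESENT);
 *    }
 */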

struct lvp_shader_module {
   struct vk_object_base base;
   uint32_t                                     size;
   char                                         data[0];
};

static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}

#define LVP_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define lvp_foreach_stage(stage, stage_bits)                         \
   for (gl_shader_stage stage,                                       \
        __tmp = (gl_shader_stage)((stage_bits) & LVP_STAGE_MASK);    \
        stage = __builtin_ffs(__tmp) - 1, __tmp;                     \
        __tmp &= ~(1 << (stage)))
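
/*
 * Illustrative note (editor sketch, not part of the driver): the helpers above
 * convert between VkShaderStageFlagBits (a single-bit mask) and gl_shader_stage
 * (a small integer), and lvp_foreach_stage() walks every stage set in a mask.
 * For example (hypothetical usage):
 *
 *    assert(vk_to_mesa_shader_stage(VK_SHADER_STAGE_FRAGMENT_BIT) ==
 *           MESA_SHADER_FRAGMENT);
 *
 *    lvp_foreach_stage(stage, VK_SHADER_STAGE_VERTEX_BIT |
 *                             VK_SHADER_STAGE_FRAGMENT_BIT) {
 *       // runs once with stage == MESA_SHADER_VERTEX and once with
 *       // stage == MESA_SHADER_FRAGMENT
 *    }
 */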

struct lvp_physical_device {
   VK_LOADER_DATA                              _loader_data;
   struct lvp_instance *                       instance;

   struct pipe_loader_device *pld;
   struct pipe_screen *pscreen;
   uint32_t max_images;

   struct wsi_device                       wsi_device;
   struct lvp_device_extension_table supported_extensions;
};

struct lvp_instance {
   struct vk_object_base base;

   VkAllocationCallbacks alloc;

   uint32_t apiVersion;
   int physicalDeviceCount;
   struct lvp_physical_device physicalDevice;

   uint64_t debug_flags;

   struct pipe_loader_device *devs;
   int num_devices;

   struct lvp_instance_extension_table enabled_extensions;
   struct lvp_instance_dispatch_table dispatch;
   struct lvp_physical_device_dispatch_table physical_device_dispatch;
   struct lvp_device_dispatch_table device_dispatch;
};

VkResult lvp_init_wsi(struct lvp_physical_device *physical_device);
void lvp_finish_wsi(struct lvp_physical_device *physical_device);

bool lvp_instance_extension_supported(const char *name);
uint32_t lvp_physical_device_api_version(struct lvp_physical_device *dev);
bool lvp_physical_device_extension_supported(struct lvp_physical_device *dev,
                                             const char *name);

struct lvp_queue {
   VK_LOADER_DATA                              _loader_data;
   VkDeviceQueueCreateFlags flags;
   struct lvp_device *                         device;
   struct pipe_context *ctx;
   bool shutdown;
   thrd_t exec_thread;
   mtx_t m;
   cnd_t new_work;
   struct list_head workqueue;
   uint32_t count;
};

struct lvp_queue_work {
   struct list_head list;
   uint32_t cmd_buffer_count;
   struct lvp_cmd_buffer **cmd_buffers;
   struct lvp_fence *fence;
};

struct lvp_pipeline_cache {
   struct vk_object_base                        base;
   struct lvp_device *                          device;
   VkAllocationCallbacks                        alloc;
};

struct lvp_device {
   struct vk_device vk;

   struct lvp_queue queue;
   struct lvp_instance *                       instance;
   struct lvp_physical_device *physical_device;
   struct pipe_screen *pscreen;

   mtx_t fence_lock;
   struct lvp_device_extension_table enabled_extensions;
   struct lvp_device_dispatch_table dispatch;
};

void lvp_device_get_cache_uuid(void *uuid);

struct lvp_device_memory {
   struct vk_object_base base;
   struct pipe_memory_allocation *pmem;
   uint32_t                                     type_index;
   VkDeviceSize                                 map_size;
   void *                                       map;
};

struct lvp_image {
   struct vk_object_base base;
   VkImageType type;
   VkFormat vk_format;
   VkDeviceSize size;
   uint32_t alignment;
   struct pipe_resource *bo;
};

static inline uint32_t
lvp_get_layerCount(const struct lvp_image *image,
                   const VkImageSubresourceRange *range)
{
   return range->layerCount == VK_REMAINING_ARRAY_LAYERS ?
      image->bo->array_size - range->baseArrayLayer : range->layerCount;
}

static inline uint32_t
lvp_get_levelCount(const struct lvp_image *image,
                   const VkImageSubresourceRange *range)
{
   return range->levelCount == VK_REMAINING_MIP_LEVELS ?
      (image->bo->last_level + 1) - range->baseMipLevel : range->levelCount;
}
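
/*
 * Illustrative note (editor sketch, not part of the driver): these helpers
 * resolve the VK_REMAINING_* sentinels against the image's actual dimensions.
 * For instance, with a 6-layer image and a hypothetical range of
 * { .baseArrayLayer = 2, .layerCount = VK_REMAINING_ARRAY_LAYERS },
 * lvp_get_layerCount() returns 4; an explicit layerCount is returned unchanged.
 */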

struct lvp_image_create_info {
   const VkImageCreateInfo *vk_info;
   uint32_t bind_flags;
   uint32_t stride;
};

VkResult
lvp_image_create(VkDevice _device,
                 const struct lvp_image_create_info *create_info,
                 const VkAllocationCallbacks* alloc,
                 VkImage *pImage);

struct lvp_image_view {
   struct vk_object_base base;
   const struct lvp_image *image; /**< VkImageViewCreateInfo::image */

   VkImageViewType view_type;
   VkFormat format;
   enum pipe_format pformat;
   VkComponentMapping components;
   VkImageSubresourceRange subresourceRange;

   struct pipe_surface *surface; /* have we created a pipe surface for this? */
};

struct lvp_subpass_attachment {
   uint32_t         attachment;
   VkImageLayout    layout;
   bool             in_render_loop;
};

struct lvp_subpass {
   uint32_t                                     attachment_count;
   struct lvp_subpass_attachment *              attachments;

   uint32_t                                     input_count;
   uint32_t                                     color_count;
   struct lvp_subpass_attachment *              input_attachments;
   struct lvp_subpass_attachment *              color_attachments;
   struct lvp_subpass_attachment *              resolve_attachments;
   struct lvp_subpass_attachment *              depth_stencil_attachment;
   struct lvp_subpass_attachment *              ds_resolve_attachment;

   /** Subpass has at least one color resolve attachment */
   bool                                         has_color_resolve;

   /** Subpass has at least one color attachment */
   bool                                         has_color_att;

   VkSampleCountFlagBits                        max_sample_count;
};

struct lvp_render_pass_attachment {
   VkFormat                                     format;
   uint32_t                                     samples;
   VkAttachmentLoadOp                           load_op;
   VkAttachmentLoadOp                           stencil_load_op;
   VkImageLayout                                initial_layout;
   VkImageLayout                                final_layout;

   /* The subpass id in which the attachment will be used first/last. */
   uint32_t                                     first_subpass_idx;
   uint32_t                                     last_subpass_idx;
};

struct lvp_render_pass {
   struct vk_object_base                        base;
   uint32_t                                     attachment_count;
   uint32_t                                     subpass_count;
   struct lvp_subpass_attachment *              subpass_attachments;
   struct lvp_render_pass_attachment *          attachments;
   struct lvp_subpass                           subpasses[0];
};

struct lvp_sampler {
   struct vk_object_base base;
   VkSamplerCreateInfo create_info;
   uint32_t state[4];
};

struct lvp_framebuffer {
   struct vk_object_base                        base;
   uint32_t                                     width;
   uint32_t                                     height;
   uint32_t                                     layers;

   uint32_t                                     attachment_count;
   struct lvp_image_view *                      attachments[0];
};

struct lvp_descriptor_set_binding_layout {
   uint16_t descriptor_index;
   VkDescriptorType type;
   /* Number of array elements in this binding */
   uint16_t array_size;
   bool valid;

   int16_t dynamic_index;
   struct {
      int16_t const_buffer_index;
      int16_t shader_buffer_index;
      int16_t sampler_index;
      int16_t sampler_view_index;
      int16_t image_index;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct lvp_sampler **immutable_samplers;
};

struct lvp_descriptor_set_layout {
   struct vk_object_base base;
   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   struct {
      uint16_t const_buffer_count;
      uint16_t shader_buffer_count;
      uint16_t sampler_count;
      uint16_t sampler_view_count;
      uint16_t image_count;
   } stage[MESA_SHADER_STAGES];

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set */
   struct lvp_descriptor_set_binding_layout binding[0];
};

struct lvp_descriptor {
   VkDescriptorType type;

   union {
      struct {
         struct lvp_image_view *image_view;
         struct lvp_sampler *sampler;
      };
      struct {
         uint64_t offset;
         uint64_t range;
         struct lvp_buffer *buffer;
      } buf;
      struct lvp_buffer_view *buffer_view;
   };
};

struct lvp_descriptor_set {
   struct vk_object_base base;
   const struct lvp_descriptor_set_layout *layout;
   struct list_head link;
   struct lvp_descriptor descriptors[0];
};

struct lvp_descriptor_pool {
   struct vk_object_base base;
   VkDescriptorPoolCreateFlags flags;
   uint32_t max_sets;

   struct list_head sets;
};

VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          const struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set);

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set);

struct lvp_pipeline_layout {
   struct vk_object_base base;
   struct {
      struct lvp_descriptor_set_layout *layout;
      uint32_t dynamic_offset_start;
   } set[MAX_SETS];

   uint32_t num_sets;
   uint32_t push_constant_size;
   struct {
      bool has_dynamic_offsets;
   } stage[MESA_SHADER_STAGES];
};

struct lvp_pipeline {
   struct vk_object_base base;
   struct lvp_device *                          device;
   struct lvp_pipeline_layout *                 layout;

   bool is_compute_pipeline;
   bool force_min_sample;
   nir_shader *pipeline_nir[MESA_SHADER_STAGES];
   void *shader_cso[PIPE_SHADER_TYPES];
   VkGraphicsPipelineCreateInfo graphics_create_info;
   VkComputePipelineCreateInfo compute_create_info;
};

struct lvp_event {
   struct vk_object_base base;
   uint64_t event_storage;
};

struct lvp_fence {
   struct vk_object_base base;
   bool signaled;
   struct pipe_fence_handle *handle;
};

struct lvp_semaphore {
   struct vk_object_base base;
   bool dummy;
};

struct lvp_buffer {
   struct vk_object_base base;
   struct lvp_device *                          device;
   VkDeviceSize                                 size;

   VkBufferUsageFlags                           usage;
   VkDeviceSize                                 offset;

   struct pipe_resource *bo;
   uint64_t total_size;
};

struct lvp_buffer_view {
   struct vk_object_base base;
   VkFormat format;
   enum pipe_format pformat;
   struct lvp_buffer *buffer;
   uint32_t offset;
   uint64_t range;
};

struct lvp_query_pool {
   struct vk_object_base base;
   VkQueryType type;
   uint32_t count;
   VkQueryPipelineStatisticFlags pipeline_stats;
   enum pipe_query_type base_type;
   struct pipe_query *queries[0];
};

struct lvp_cmd_pool {
   struct vk_object_base                        base;
   VkAllocationCallbacks                        alloc;
   struct list_head                             cmd_buffers;
   struct list_head                             free_cmd_buffers;
};


enum lvp_cmd_buffer_status {
   LVP_CMD_BUFFER_STATUS_INVALID,
   LVP_CMD_BUFFER_STATUS_INITIAL,
   LVP_CMD_BUFFER_STATUS_RECORDING,
   LVP_CMD_BUFFER_STATUS_EXECUTABLE,
   LVP_CMD_BUFFER_STATUS_PENDING,
};

struct lvp_cmd_buffer {
   struct vk_object_base base;

   struct lvp_device *                          device;

   VkCommandBufferLevel                         level;
   enum lvp_cmd_buffer_status status;
   struct lvp_cmd_pool *                        pool;
   struct list_head                             pool_link;

   struct list_head                             cmds;

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
};

/* In the same order as the command buffer building commands in the spec. */
enum lvp_cmds {
   LVP_CMD_BIND_PIPELINE,
   LVP_CMD_SET_VIEWPORT,
   LVP_CMD_SET_SCISSOR,
   LVP_CMD_SET_LINE_WIDTH,
   LVP_CMD_SET_DEPTH_BIAS,
   LVP_CMD_SET_BLEND_CONSTANTS,
   LVP_CMD_SET_DEPTH_BOUNDS,
   LVP_CMD_SET_STENCIL_COMPARE_MASK,
   LVP_CMD_SET_STENCIL_WRITE_MASK,
   LVP_CMD_SET_STENCIL_REFERENCE,
   LVP_CMD_BIND_DESCRIPTOR_SETS,
   LVP_CMD_BIND_INDEX_BUFFER,
   LVP_CMD_BIND_VERTEX_BUFFERS,
   LVP_CMD_DRAW,
   LVP_CMD_DRAW_INDEXED,
   LVP_CMD_DRAW_INDIRECT,
   LVP_CMD_DRAW_INDEXED_INDIRECT,
   LVP_CMD_DISPATCH,
   LVP_CMD_DISPATCH_INDIRECT,
   LVP_CMD_COPY_BUFFER,
   LVP_CMD_COPY_IMAGE,
   LVP_CMD_BLIT_IMAGE,
   LVP_CMD_COPY_BUFFER_TO_IMAGE,
   LVP_CMD_COPY_IMAGE_TO_BUFFER,
   LVP_CMD_UPDATE_BUFFER,
   LVP_CMD_FILL_BUFFER,
   LVP_CMD_CLEAR_COLOR_IMAGE,
   LVP_CMD_CLEAR_DEPTH_STENCIL_IMAGE,
   LVP_CMD_CLEAR_ATTACHMENTS,
   LVP_CMD_RESOLVE_IMAGE,
   LVP_CMD_SET_EVENT,
   LVP_CMD_RESET_EVENT,
   LVP_CMD_WAIT_EVENTS,
   LVP_CMD_PIPELINE_BARRIER,
   LVP_CMD_BEGIN_QUERY,
   LVP_CMD_END_QUERY,
   LVP_CMD_RESET_QUERY_POOL,
   LVP_CMD_WRITE_TIMESTAMP,
   LVP_CMD_COPY_QUERY_POOL_RESULTS,
   LVP_CMD_PUSH_CONSTANTS,
   LVP_CMD_BEGIN_RENDER_PASS,
   LVP_CMD_NEXT_SUBPASS,
   LVP_CMD_END_RENDER_PASS,
   LVP_CMD_EXECUTE_COMMANDS,
};

struct lvp_cmd_bind_pipeline {
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline *pipeline;
};

struct lvp_cmd_set_viewport {
   uint32_t first_viewport;
   uint32_t viewport_count;
   VkViewport viewports[16];
};

struct lvp_cmd_set_scissor {
   uint32_t first_scissor;
   uint32_t scissor_count;
   VkRect2D scissors[16];
};

struct lvp_cmd_set_line_width {
   float line_width;
};

struct lvp_cmd_set_depth_bias {
   float constant_factor;
   float clamp;
   float slope_factor;
};

struct lvp_cmd_set_blend_constants {
   float blend_constants[4];
};

struct lvp_cmd_set_depth_bounds {
   float min_depth;
   float max_depth;
};

struct lvp_cmd_set_stencil_vals {
   VkStencilFaceFlags face_mask;
   uint32_t value;
};

struct lvp_cmd_bind_descriptor_sets {
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *layout;
   uint32_t first;
   uint32_t count;
   struct lvp_descriptor_set **sets;
   uint32_t dynamic_offset_count;
   const uint32_t *dynamic_offsets;
};

struct lvp_cmd_bind_index_buffer {
   const struct lvp_buffer *buffer;
   VkDeviceSize offset;
   VkIndexType index_type;
};

struct lvp_cmd_bind_vertex_buffers {
   uint32_t first;
   uint32_t binding_count;
   struct lvp_buffer **buffers;
   const VkDeviceSize *offsets;
};

struct lvp_cmd_draw {
   uint32_t vertex_count;
   uint32_t instance_count;
   uint32_t first_vertex;
   uint32_t first_instance;
};

struct lvp_cmd_draw_indexed {
   uint32_t index_count;
   uint32_t instance_count;
   uint32_t first_index;
   uint32_t vertex_offset;
   uint32_t first_instance;
};

struct lvp_cmd_draw_indirect {
   VkDeviceSize offset;
   struct lvp_buffer *buffer;
   uint32_t draw_count;
   uint32_t stride;
};

struct lvp_cmd_dispatch {
   uint32_t x;
   uint32_t y;
   uint32_t z;
};

struct lvp_cmd_dispatch_indirect {
   const struct lvp_buffer *buffer;
   VkDeviceSize offset;
};

struct lvp_cmd_copy_buffer {
   struct lvp_buffer *src;
   struct lvp_buffer *dst;
   uint32_t region_count;
   const VkBufferCopy *regions;
};

struct lvp_cmd_copy_image {
   struct lvp_image *src;
   struct lvp_image *dst;
   VkImageLayout src_layout;
   VkImageLayout dst_layout;
   uint32_t region_count;
   const VkImageCopy *regions;
};

struct lvp_cmd_blit_image {
   struct lvp_image *src;
   struct lvp_image *dst;
   VkImageLayout src_layout;
   VkImageLayout dst_layout;
   uint32_t region_count;
   const VkImageBlit *regions;
   VkFilter filter;
};

struct lvp_cmd_copy_buffer_to_image {
   struct lvp_buffer *src;
   struct lvp_image *dst;
   VkImageLayout dst_layout;
   uint32_t region_count;
   const VkBufferImageCopy *regions;
};

struct lvp_cmd_copy_image_to_buffer {
   struct lvp_image *src;
   struct lvp_buffer *dst;
   VkImageLayout src_layout;
   uint32_t region_count;
   const VkBufferImageCopy *regions;
};

struct lvp_cmd_update_buffer {
   struct lvp_buffer *buffer;
   VkDeviceSize offset;
   VkDeviceSize data_size;
   char data[0];
};

struct lvp_cmd_fill_buffer {
   struct lvp_buffer *buffer;
   VkDeviceSize offset;
   VkDeviceSize fill_size;
   uint32_t data;
};

struct lvp_cmd_clear_color_image {
   struct lvp_image *image;
   VkImageLayout layout;
   VkClearColorValue clear_val;
   uint32_t range_count;
   VkImageSubresourceRange *ranges;
};

struct lvp_cmd_clear_ds_image {
   struct lvp_image *image;
   VkImageLayout layout;
   VkClearDepthStencilValue clear_val;
   uint32_t range_count;
   VkImageSubresourceRange *ranges;
};

struct lvp_cmd_clear_attachments {
   uint32_t attachment_count;
   VkClearAttachment *attachments;
   uint32_t rect_count;
   VkClearRect *rects;
};

struct lvp_cmd_resolve_image {
   struct lvp_image *src;
   struct lvp_image *dst;
   VkImageLayout src_layout;
   VkImageLayout dst_layout;
   uint32_t region_count;
   VkImageResolve *regions;
};

struct lvp_cmd_event_set {
   struct lvp_event *event;
   bool value;
   bool flush;
};

struct lvp_cmd_wait_events {
   uint32_t event_count;
   struct lvp_event **events;
   VkPipelineStageFlags src_stage_mask;
   VkPipelineStageFlags dst_stage_mask;
   uint32_t memory_barrier_count;
   VkMemoryBarrier *memory_barriers;
   uint32_t buffer_memory_barrier_count;
   VkBufferMemoryBarrier *buffer_memory_barriers;
   uint32_t image_memory_barrier_count;
   VkImageMemoryBarrier *image_memory_barriers;
};

struct lvp_cmd_pipeline_barrier {
   VkPipelineStageFlags src_stage_mask;
   VkPipelineStageFlags dst_stage_mask;
   bool by_region;
   uint32_t memory_barrier_count;
   VkMemoryBarrier *memory_barriers;
   uint32_t buffer_memory_barrier_count;
   VkBufferMemoryBarrier *buffer_memory_barriers;
   uint32_t image_memory_barrier_count;
   VkImageMemoryBarrier *image_memory_barriers;
};

struct lvp_cmd_query_cmd {
   struct lvp_query_pool *pool;
   uint32_t query;
   uint32_t index;
   bool precise;
   bool flush;
};

struct lvp_cmd_copy_query_pool_results {
   struct lvp_query_pool *pool;
   uint32_t first_query;
   uint32_t query_count;
   struct lvp_buffer *dst;
   VkDeviceSize dst_offset;
   VkDeviceSize stride;
   VkQueryResultFlags flags;
};

struct lvp_cmd_push_constants {
   VkShaderStageFlags stage;
   uint32_t offset;
   uint32_t size;
   uint32_t val[1];
};

struct lvp_attachment_state {
   VkImageAspectFlags pending_clear_aspects;
   VkClearValue clear_value;
};

struct lvp_cmd_begin_render_pass {
   struct lvp_framebuffer *framebuffer;
   struct lvp_render_pass *render_pass;
   VkRect2D render_area;
   struct lvp_attachment_state *attachments;
};

struct lvp_cmd_next_subpass {
   VkSubpassContents contents;
};

struct lvp_cmd_execute_commands {
   uint32_t command_buffer_count;
   struct lvp_cmd_buffer *cmd_buffers[0];
};

struct lvp_cmd_buffer_entry {
   struct list_head cmd_link;
   uint32_t cmd_type;
   union {
      struct lvp_cmd_bind_pipeline pipeline;
      struct lvp_cmd_set_viewport set_viewport;
      struct lvp_cmd_set_scissor set_scissor;
      struct lvp_cmd_set_line_width set_line_width;
      struct lvp_cmd_set_depth_bias set_depth_bias;
      struct lvp_cmd_set_blend_constants set_blend_constants;
      struct lvp_cmd_set_depth_bounds set_depth_bounds;
      struct lvp_cmd_set_stencil_vals stencil_vals;
      struct lvp_cmd_bind_descriptor_sets descriptor_sets;
      struct lvp_cmd_bind_vertex_buffers vertex_buffers;
      struct lvp_cmd_bind_index_buffer index_buffer;
      struct lvp_cmd_draw draw;
      struct lvp_cmd_draw_indexed draw_indexed;
      struct lvp_cmd_draw_indirect draw_indirect;
      struct lvp_cmd_dispatch dispatch;
      struct lvp_cmd_dispatch_indirect dispatch_indirect;
      struct lvp_cmd_copy_buffer copy_buffer;
      struct lvp_cmd_copy_image copy_image;
      struct lvp_cmd_blit_image blit_image;
      struct lvp_cmd_copy_buffer_to_image buffer_to_img;
      struct lvp_cmd_copy_image_to_buffer img_to_buffer;
      struct lvp_cmd_update_buffer update_buffer;
      struct lvp_cmd_fill_buffer fill_buffer;
      struct lvp_cmd_clear_color_image clear_color_image;
      struct lvp_cmd_clear_ds_image clear_ds_image;
      struct lvp_cmd_clear_attachments clear_attachments;
      struct lvp_cmd_resolve_image resolve_image;
      struct lvp_cmd_event_set event_set;
      struct lvp_cmd_wait_events wait_events;
      struct lvp_cmd_pipeline_barrier pipeline_barrier;
      struct lvp_cmd_query_cmd query;
      struct lvp_cmd_copy_query_pool_results copy_query_pool_results;
      struct lvp_cmd_push_constants push_constants;
      struct lvp_cmd_begin_render_pass begin_render_pass;
      struct lvp_cmd_next_subpass next_subpass;
      struct lvp_cmd_execute_commands execute_commands;
   } u;
};
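
/*
 * Illustrative note (editor sketch, not part of the driver): recorded commands
 * are stored as lvp_cmd_buffer_entry nodes on the command buffer's `cmds`
 * list, tagged with an enum lvp_cmds value and carrying their payload in the
 * union above.  Roughly (hypothetical helper shown, error handling omitted):
 *
 *    struct lvp_cmd_buffer_entry *cmd = alloc_cmd_entry(cmd_buffer);
 *    cmd->cmd_type = LVP_CMD_DRAW;
 *    cmd->u.draw.vertex_count   = vertexCount;
 *    cmd->u.draw.instance_count = instanceCount;
 *    cmd->u.draw.first_vertex   = firstVertex;
 *    cmd->u.draw.first_instance = firstInstance;
 *    list_addtail(&cmd->cmd_link, &cmd_buffer->cmds);
 */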

VkResult lvp_execute_cmds(struct lvp_device *device,
                          struct lvp_queue *queue,
                          struct lvp_fence *fence,
                          struct lvp_cmd_buffer *cmd_buffer);

enum pipe_format vk_format_to_pipe(VkFormat format);

static inline VkImageAspectFlags
vk_format_aspects(VkFormat format)
{
   switch (format) {
   case VK_FORMAT_UNDEFINED:
      return 0;

   case VK_FORMAT_S8_UINT:
      return VK_IMAGE_ASPECT_STENCIL_BIT;

   case VK_FORMAT_D16_UNORM_S8_UINT:
   case VK_FORMAT_D24_UNORM_S8_UINT:
   case VK_FORMAT_D32_SFLOAT_S8_UINT:
      return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;

   case VK_FORMAT_D16_UNORM:
   case VK_FORMAT_X8_D24_UNORM_PACK32:
   case VK_FORMAT_D32_SFLOAT:
      return VK_IMAGE_ASPECT_DEPTH_BIT;

   default:
      return VK_IMAGE_ASPECT_COLOR_BIT;
   }
}
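
/*
 * Illustrative note (editor sketch, not part of the driver): vk_format_aspects()
 * reports which image aspects a format carries, e.g.
 * vk_format_aspects(VK_FORMAT_D24_UNORM_S8_UINT) returns
 * VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, while any color
 * format falls through to VK_IMAGE_ASPECT_COLOR_BIT.
 */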

#ifdef __cplusplus
}
#endif