• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright © 2019 Red Hat.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21  * IN THE SOFTWARE.
22  */
23 
24 #pragma once
25 #include <stdlib.h>
26 #include <stdio.h>
27 #include <stdbool.h>
28 #include <string.h>
29 #include <assert.h>
30 #include <stdint.h>
31 
32 #include <llvm/Config/llvm-config.h>
33 
34 #include "util/macros.h"
35 #include "util/list.h"
36 #include "util/u_dynarray.h"
37 #include "util/simple_mtx.h"
38 #include "util/u_queue.h"
39 #include "util/u_upload_mgr.h"
40 
41 #include "compiler/shader_enums.h"
42 #include "pipe/p_screen.h"
43 #include "pipe/p_state.h"
44 #include "cso_cache/cso_context.h"
45 #include "nir.h"
46 
47 /* Pre-declarations needed for WSI entrypoints */
48 struct wl_surface;
49 struct wl_display;
50 typedef struct xcb_connection_t xcb_connection_t;
51 typedef uint32_t xcb_visualid_t;
52 typedef uint32_t xcb_window_t;
53 
54 #define VK_PROTOTYPES
55 #include <vulkan/vulkan.h>
56 #include <vulkan/vk_icd.h>
57 
58 #include "lvp_entrypoints.h"
59 #include "vk_device.h"
60 #include "vk_instance.h"
61 #include "vk_image.h"
62 #include "vk_log.h"
63 #include "vk_physical_device.h"
64 #include "vk_shader_module.h"
65 #include "vk_util.h"
66 #include "vk_format.h"
67 #include "vk_cmd_queue.h"
68 #include "vk_command_buffer.h"
69 #include "vk_command_pool.h"
70 #include "vk_descriptor_set_layout.h"
71 #include "vk_graphics_state.h"
72 #include "vk_pipeline_layout.h"
73 #include "vk_queue.h"
74 #include "vk_sync.h"
75 #include "vk_sync_timeline.h"
76 
77 #include "wsi_common.h"
78 
79 #include <assert.h>
80 #ifdef __cplusplus
81 extern "C" {
82 #endif
83 
84 #define MAX_SETS         8
85 #define MAX_PUSH_CONSTANTS_SIZE 128
86 #define MAX_PUSH_DESCRIPTORS 32
87 #define MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE 4096
88 #define MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS 8
89 
90 #ifdef _WIN32
91 #define lvp_printflike(a, b)
92 #else
93 #define lvp_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
94 #endif
95 
96 int lvp_get_instance_entrypoint_index(const char *name);
97 int lvp_get_device_entrypoint_index(const char *name);
98 int lvp_get_physical_device_entrypoint_index(const char *name);
99 
100 const char *lvp_get_instance_entry_name(int index);
101 const char *lvp_get_physical_device_entry_name(int index);
102 const char *lvp_get_device_entry_name(int index);
103 
104 bool lvp_instance_entrypoint_is_enabled(int index, uint32_t core_version,
105                                          const struct vk_instance_extension_table *instance);
106 bool lvp_physical_device_entrypoint_is_enabled(int index, uint32_t core_version,
107                                                 const struct vk_instance_extension_table *instance);
108 bool lvp_device_entrypoint_is_enabled(int index, uint32_t core_version,
109                                        const struct vk_instance_extension_table *instance,
110                                        const struct vk_device_extension_table *device);
111 
#define LVP_DEBUG_ALL_ENTRYPOINTS (1 << 0)

/* Report a not-yet-implemented code path; printf-style message. */
void __lvp_finishme(const char *file, int line, const char *format, ...)
   lvp_printflike(3, 4);

/* NOTE(review): the trailing ';' in this expansion produces an empty
 * statement at every use site and would break `if (x) lvp_finishme(...);
 * else` — kept as-is for source compatibility with existing callers. */
#define lvp_finishme(format, ...) \
   __lvp_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);

/* Log a "stub" warning and return the given value from the caller. */
#define stub_return(v) \
   do { \
      lvp_finishme("stub %s", __func__); \
      return (v); \
   } while (0)

/* Log a "stub" warning and return from a void caller. */
#define stub() \
   do { \
      lvp_finishme("stub %s", __func__); \
      return; \
   } while (0)

/* Bitmask with one bit set per gl_shader_stage. */
#define LVP_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate 'stage' over each stage bit set in 'stage_bits'.
 * __tmp holds the remaining bits; ffs() selects the lowest set bit,
 * which is then cleared at the end of each iteration.  The loop
 * condition is the comma expression's value, i.e. "__tmp != 0". */
#define lvp_foreach_stage(stage, stage_bits)                         \
   for (gl_shader_stage stage,                                       \
        __tmp = (gl_shader_stage)((stage_bits) & LVP_STAGE_MASK);    \
        stage = ffs(__tmp) - 1, __tmp;                     \
        __tmp &= ~(1 << (stage)))
139 
/* One software "GPU": wraps the gallium screen that backs lavapipe. */
struct lvp_physical_device {
   struct vk_physical_device vk;

   struct pipe_loader_device *pld;
   struct pipe_screen *pscreen;   /* gallium screen all resources come from */
   uint32_t max_images;

   /* Sync primitives exposed through the common vk_sync framework. */
   struct vk_sync_timeline_type sync_timeline_type;
   const struct vk_sync_type *sync_types[3];

   VkPhysicalDeviceLimits device_limits;

   struct wsi_device                       wsi_device;
};

/* Instance state; lavapipe exposes a single physical device. */
struct lvp_instance {
   struct vk_instance vk;

   uint32_t apiVersion;
   int physicalDeviceCount;
   struct lvp_physical_device physicalDevice;

   uint64_t debug_flags;   /* LVP_DEBUG_* bits (e.g. LVP_DEBUG_ALL_ENTRYPOINTS) */

   struct pipe_loader_device *devs;
   int num_devices;
};
167 
168 VkResult lvp_init_wsi(struct lvp_physical_device *physical_device);
169 void lvp_finish_wsi(struct lvp_physical_device *physical_device);
170 
171 bool lvp_physical_device_extension_supported(struct lvp_physical_device *dev,
172                                               const char *name);
173 
/* The single lavapipe queue; owns the gallium context used for execution. */
struct lvp_queue {
   struct vk_queue vk;
   struct lvp_device *                         device;
   struct pipe_context *ctx;       /* gallium context commands execute on */
   struct cso_context *cso;        /* constant-state-object cache for ctx */
   struct u_upload_mgr *uploader;
   struct pipe_fence_handle *last_fence;
   void *state;
   /* Pipelines queued for deferred destruction; NOTE(review): presumably
    * guarded by pipeline_lock below — confirm against users. */
   struct util_dynarray pipeline_destroys;
   simple_mtx_t pipeline_lock;
};

/* Vulkan pipeline-cache object; lavapipe keeps no cached payload here. */
struct lvp_pipeline_cache {
   struct vk_object_base                        base;
   struct lvp_device *                          device;
   VkAllocationCallbacks                        alloc;
};

/* Logical device: one queue plus back-pointers to instance/screen. */
struct lvp_device {
   struct vk_device vk;

   struct lvp_queue queue;
   struct lvp_instance *                       instance;
   struct lvp_physical_device *physical_device;
   struct pipe_screen *pscreen;
   bool poison_mem;   /* fill allocations with poison for debugging */
};
201 
202 void lvp_device_get_cache_uuid(void *uuid);
203 
/* How a VkDeviceMemory allocation was obtained. */
enum lvp_device_memory_type {
   LVP_DEVICE_MEMORY_TYPE_DEFAULT,    /* driver-allocated */
   LVP_DEVICE_MEMORY_TYPE_USER_PTR,   /* imported host pointer */
   LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD,  /* imported/exported via opaque fd */
};

struct lvp_device_memory {
   struct vk_object_base base;
   struct pipe_memory_allocation *pmem;
   uint32_t                                     type_index;
   VkDeviceSize                                 map_size;
   void *                                       map;      /* host mapping, if mapped */
   enum lvp_device_memory_type memory_type;
   int                                          backed_fd; /* fd backing an opaque-fd import */
};

/* vk_sync implementation backed by a gallium pipe fence. */
struct lvp_pipe_sync {
   struct vk_sync base;

   mtx_t lock;      /* protects the fields below */
   cnd_t changed;   /* signalled when 'signaled'/'fence' change */

   bool signaled;
   struct pipe_fence_handle *fence;
};
229 
230 extern const struct vk_sync_type lvp_pipe_sync_type;
231 
232 void lvp_pipe_sync_signal_with_fence(struct lvp_device *device,
233                                      struct lvp_pipe_sync *sync,
234                                      struct pipe_fence_handle *fence);
235 
236 static inline struct lvp_pipe_sync *
vk_sync_as_lvp_pipe_sync(struct vk_sync * sync)237 vk_sync_as_lvp_pipe_sync(struct vk_sync *sync)
238 {
239    assert(sync->type == &lvp_pipe_sync_type);
240    return container_of(sync, struct lvp_pipe_sync, base);
241 }
242 
/* VkImage backed by a gallium pipe_resource. */
struct lvp_image {
   struct vk_image vk;
   VkDeviceSize size;        /* total memory requirement */
   uint32_t alignment;
   struct pipe_memory_allocation *pmem;
   unsigned memory_offset;   /* offset of the image within pmem */
   struct pipe_resource *bo;
};

struct lvp_image_view {
   struct vk_image_view vk;
   const struct lvp_image *image; /**< VkImageViewCreateInfo::image */

   enum pipe_format pformat;

   struct pipe_surface *surface; /* have we created a pipe surface for this? */
   struct lvp_image_view *multisample; //VK_EXT_multisampled_render_to_single_sampled
};

/* VkSampler; 'state' holds the packed gallium sampler state. */
struct lvp_sampler {
   struct vk_object_base base;
   VkSamplerCreateInfo create_info;
   union pipe_color_union border_color;
   VkSamplerReductionMode reduction_mode;
   uint32_t state[4];
};
269 
/* Layout of one binding within a descriptor set layout. */
struct lvp_descriptor_set_binding_layout {
   uint16_t descriptor_index;
   VkDescriptorType type;
   /* Number of array elements in this binding */
   uint16_t array_size;
   bool valid;

   /* NOTE(review): looks like -1 marks "no dynamic offset" — confirm. */
   int16_t dynamic_index;
   /* Per-stage start indices into the gallium binding spaces. */
   struct {
      int16_t const_buffer_index;
      int16_t shader_buffer_index;
      int16_t sampler_index;
      int16_t sampler_view_index;
      int16_t image_index;
      int16_t uniform_block_index;
      int16_t uniform_block_offset;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct lvp_sampler **immutable_samplers;
};

struct lvp_descriptor_set_layout {
   struct vk_descriptor_set_layout vk;

   /* add new members after this */

   uint32_t immutable_sampler_count;

   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   /* Per-stage totals across all bindings. */
   struct {
      uint16_t const_buffer_count;
      uint16_t shader_buffer_count;
      uint16_t sampler_count;
      uint16_t sampler_view_count;
      uint16_t image_count;
      uint16_t uniform_block_count;
      uint16_t uniform_block_size;
      uint16_t uniform_block_sizes[MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS]; //zero-indexed
   } stage[MESA_SHADER_STAGES];

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set */
   /* NOTE(review): [0] is the pre-C99 trailing-array idiom; a C99 flexible
    * array member ("binding[]") would be the standard form. */
   struct lvp_descriptor_set_binding_layout binding[0];
};
325 
326 static inline const struct lvp_descriptor_set_layout *
vk_to_lvp_descriptor_set_layout(const struct vk_descriptor_set_layout * layout)327 vk_to_lvp_descriptor_set_layout(const struct vk_descriptor_set_layout *layout)
328 {
329    return container_of(layout, const struct lvp_descriptor_set_layout, vk);
330 }
331 
/* Payload of one descriptor; the active member depends on the
 * descriptor type (image/sampler, buffer, buffer view, or inline
 * uniform data). */
union lvp_descriptor_info {
   struct {
      struct lvp_sampler *sampler;
      struct lvp_image_view *iview;
      VkImageLayout image_layout;
   };
   struct {
      struct lvp_buffer *buffer;
      VkDeviceSize offset;
      VkDeviceSize range;
   };
   struct lvp_buffer_view *buffer_view;
   uint8_t *uniform;
};

/* One written descriptor: its type plus the matching payload. */
struct lvp_descriptor {
   VkDescriptorType type;

   union lvp_descriptor_info info;
};

/* A descriptor set: trailing array sized from layout->size. */
struct lvp_descriptor_set {
   struct vk_object_base base;
   struct lvp_descriptor_set_layout *layout;
   struct list_head link;   /* entry in lvp_descriptor_pool::sets */
   struct lvp_descriptor descriptors[0];
};

struct lvp_descriptor_pool {
   struct vk_object_base base;
   VkDescriptorPoolCreateFlags flags;
   uint32_t max_sets;

   struct list_head sets;   /* all sets allocated from this pool */
};

/* Reference-counted VkDescriptorUpdateTemplate; see the
 * templ_ref/templ_unref helpers below for lifetime management. */
struct lvp_descriptor_update_template {
   struct vk_object_base base;
   unsigned ref_cnt;
   uint32_t entry_count;
   uint32_t set;
   VkDescriptorUpdateTemplateType type;
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *pipeline_layout;
   VkDescriptorUpdateTemplateEntry entry[0];
};
378 
379 static inline void
lvp_descriptor_template_templ_ref(struct lvp_descriptor_update_template * templ)380 lvp_descriptor_template_templ_ref(struct lvp_descriptor_update_template *templ)
381 {
382    assert(templ && templ->ref_cnt >= 1);
383    p_atomic_inc(&templ->ref_cnt);
384 }
385 
386 void
387 lvp_descriptor_template_destroy(struct lvp_device *device, struct lvp_descriptor_update_template *templ);
388 
389 static inline void
lvp_descriptor_template_templ_unref(struct lvp_device * device,struct lvp_descriptor_update_template * templ)390 lvp_descriptor_template_templ_unref(struct lvp_device *device,
391                                     struct lvp_descriptor_update_template *templ)
392 {
393    if (!templ)
394       return;
395    assert(templ->ref_cnt >= 1);
396    if (p_atomic_dec_zero(&templ->ref_cnt))
397       lvp_descriptor_template_destroy(device, templ);
398 }
399 
400 VkResult
401 lvp_descriptor_set_create(struct lvp_device *device,
402                           struct lvp_descriptor_set_layout *layout,
403                           struct lvp_descriptor_set **out_set);
404 
405 void
406 lvp_descriptor_set_destroy(struct lvp_device *device,
407                            struct lvp_descriptor_set *set);
408 
/* VkPipelineLayout plus per-stage uniform-block bookkeeping merged
 * across all of the layout's descriptor sets. */
struct lvp_pipeline_layout {
   struct vk_pipeline_layout vk;

   uint32_t push_constant_size;
   VkShaderStageFlags push_constant_stages;
   struct {
      uint16_t uniform_block_size;
      uint16_t uniform_block_count;
      uint16_t uniform_block_sizes[MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS * MAX_SETS];
   } stage[MESA_SHADER_STAGES];
};

/* Bitmasks of which image/buffer bindings a shader stage accesses. */
struct lvp_access_info {
   uint64_t images_read;
   uint64_t images_written;
   uint64_t buffers_written;
};
426 
/* VkPipeline: holds per-stage NIR, compiled gallium CSOs, and the
 * state needed to (re)compile stages with inlined uniforms. */
struct lvp_pipeline {
   struct vk_object_base base;
   struct lvp_device *                          device;
   struct lvp_pipeline_layout *                 layout;

   struct lvp_access_info access[MESA_SHADER_STAGES];

   void *mem_ctx;       /* allocation context for pipeline data */
   void *state_data;
   bool is_compute_pipeline;
   bool force_min_sample;
   nir_shader *pipeline_nir[MESA_SHADER_STAGES];
   void *shader_cso[PIPE_SHADER_TYPES];  /* compiled gallium shader objects */
   /* Per-stage uniform-inlining state (see lvp_find_inlinable_uniforms). */
   struct {
      uint32_t uniform_offsets[PIPE_MAX_CONSTANT_BUFFERS][MAX_INLINABLE_UNIFORMS];
      uint8_t count[PIPE_MAX_CONSTANT_BUFFERS];
      bool must_inline;
      uint32_t can_inline; //bitmask
   } inlines[MESA_SHADER_STAGES];
   gl_shader_stage last_vertex;  /* last geometry-producing stage */
   struct pipe_stream_output_info stream_output;
   struct vk_graphics_pipeline_state graphics_state;
   VkGraphicsPipelineLibraryFlagsEXT stages;
   bool line_smooth;
   bool disable_multisample;
   bool line_rectangular;
   bool gs_output_lines;
   bool library;   /* created as a VK_KHR_pipeline_library library */
};
456 
/* VkEvent; state lives in a single host word. */
struct lvp_event {
   struct vk_object_base base;
   volatile uint64_t event_storage;
};

/* VkBuffer backed by a gallium pipe_resource. */
struct lvp_buffer {
   struct vk_object_base base;

   VkDeviceSize                                 size;

   VkBufferUsageFlags                           usage;
   VkDeviceSize                                 offset;

   struct pipe_memory_allocation *pmem;
   struct pipe_resource *bo;
   uint64_t total_size;
};

struct lvp_buffer_view {
   struct vk_object_base base;
   VkFormat format;
   enum pipe_format pformat;   /* 'format' translated to gallium */
   struct lvp_buffer *buffer;
   uint32_t offset;
   uint64_t range;
};

/* VkQueryPool: one gallium query per slot, allocated inline. */
struct lvp_query_pool {
   struct vk_object_base base;
   VkQueryType type;
   uint32_t count;
   VkQueryPipelineStatisticFlags pipeline_stats;
   enum pipe_query_type base_type;
   struct pipe_query *queries[0];
};
492 
/* VkCommandPool: tracks live and recycled command buffers. */
struct lvp_cmd_pool {
   struct vk_command_pool                       vk;
   struct list_head                             cmd_buffers;
   struct list_head                             free_cmd_buffers;
};


/* Command-buffer lifecycle, mirroring the Vulkan spec's state machine. */
enum lvp_cmd_buffer_status {
   LVP_CMD_BUFFER_STATUS_INVALID,
   LVP_CMD_BUFFER_STATUS_INITIAL,
   LVP_CMD_BUFFER_STATUS_RECORDING,
   LVP_CMD_BUFFER_STATUS_EXECUTABLE,
   LVP_CMD_BUFFER_STATUS_PENDING,
};

struct lvp_cmd_buffer {
   struct vk_command_buffer vk;

   struct lvp_device *                          device;

   enum lvp_cmd_buffer_status status;
   struct lvp_cmd_pool *                        pool;
   struct list_head                             pool_link;  /* entry in pool's list */

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
};
519 
520 
521 static inline const struct lvp_descriptor_set_layout *
get_set_layout(const struct lvp_pipeline_layout * layout,uint32_t set)522 get_set_layout(const struct lvp_pipeline_layout *layout, uint32_t set)
523 {
524    return container_of(layout->vk.set_layouts[set],
525                        const struct lvp_descriptor_set_layout, vk);
526 }
527 
528 static inline const struct lvp_descriptor_set_binding_layout *
get_binding_layout(const struct lvp_pipeline_layout * layout,uint32_t set,uint32_t binding)529 get_binding_layout(const struct lvp_pipeline_layout *layout,
530                    uint32_t set, uint32_t binding)
531 {
532    return &get_set_layout(layout, set)->binding[binding];
533 }
534 
535 #define LVP_FROM_HANDLE(__lvp_type, __name, __handle) \
536    struct __lvp_type *__name = __lvp_type ## _from_handle(__handle)
537 
538 VK_DEFINE_HANDLE_CASTS(lvp_cmd_buffer, vk.base, VkCommandBuffer,
539                        VK_OBJECT_TYPE_COMMAND_BUFFER)
540 VK_DEFINE_HANDLE_CASTS(lvp_device, vk.base, VkDevice, VK_OBJECT_TYPE_DEVICE)
541 VK_DEFINE_HANDLE_CASTS(lvp_instance, vk.base, VkInstance, VK_OBJECT_TYPE_INSTANCE)
542 VK_DEFINE_HANDLE_CASTS(lvp_physical_device, vk.base, VkPhysicalDevice,
543                        VK_OBJECT_TYPE_PHYSICAL_DEVICE)
544 VK_DEFINE_HANDLE_CASTS(lvp_queue, vk.base, VkQueue, VK_OBJECT_TYPE_QUEUE)
545 
546 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_cmd_pool, vk.base, VkCommandPool,
547                                VK_OBJECT_TYPE_COMMAND_POOL)
548 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer, base, VkBuffer,
549                                VK_OBJECT_TYPE_BUFFER)
550 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer_view, base, VkBufferView,
551                                VK_OBJECT_TYPE_BUFFER_VIEW)
552 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_pool, base, VkDescriptorPool,
553                                VK_OBJECT_TYPE_DESCRIPTOR_POOL)
554 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set, base, VkDescriptorSet,
555                                VK_OBJECT_TYPE_DESCRIPTOR_SET)
556 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set_layout, vk.base, VkDescriptorSetLayout,
557                                VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT)
558 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_update_template, base, VkDescriptorUpdateTemplate,
559                                VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE)
560 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_device_memory, base, VkDeviceMemory,
561                                VK_OBJECT_TYPE_DEVICE_MEMORY)
562 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_event, base, VkEvent, VK_OBJECT_TYPE_EVENT)
563 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_image, vk.base, VkImage, VK_OBJECT_TYPE_IMAGE)
564 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_image_view, vk.base, VkImageView,
565                                VK_OBJECT_TYPE_IMAGE_VIEW);
566 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_cache, base, VkPipelineCache,
567                                VK_OBJECT_TYPE_PIPELINE_CACHE)
568 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline, base, VkPipeline,
569                                VK_OBJECT_TYPE_PIPELINE)
570 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_layout, vk.base, VkPipelineLayout,
571                                VK_OBJECT_TYPE_PIPELINE_LAYOUT)
572 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_query_pool, base, VkQueryPool,
573                                VK_OBJECT_TYPE_QUERY_POOL)
574 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_sampler, base, VkSampler,
575                                VK_OBJECT_TYPE_SAMPLER)
576 
/* One descriptor write within a push-descriptor command. */
struct lvp_write_descriptor {
   uint32_t dst_binding;
   uint32_t dst_array_element;
   uint32_t descriptor_count;
   VkDescriptorType descriptor_type;
};

/* Recorded vkCmdPushDescriptorSetKHR: 'descriptors' and 'infos' are
 * parallel arrays of length descriptor_write_count. */
struct lvp_cmd_push_descriptor_set {
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *layout;
   uint32_t set;
   uint32_t descriptor_write_count;
   struct lvp_write_descriptor *descriptors;
   union lvp_descriptor_info *infos;
};
592 
593 void lvp_add_enqueue_cmd_entrypoints(struct vk_device_dispatch_table *disp);
594 
595 VkResult lvp_execute_cmds(struct lvp_device *device,
596                           struct lvp_queue *queue,
597                           struct lvp_cmd_buffer *cmd_buffer);
598 size_t
599 lvp_get_rendering_state_size(void);
600 struct lvp_image *lvp_swapchain_get_image(VkSwapchainKHR swapchain,
601 					  uint32_t index);
602 
603 static inline enum pipe_format
lvp_vk_format_to_pipe_format(VkFormat format)604 lvp_vk_format_to_pipe_format(VkFormat format)
605 {
606    /* Some formats cause problems with CTS right now.*/
607    if (format == VK_FORMAT_R4G4B4A4_UNORM_PACK16 ||
608        format == VK_FORMAT_R8_SRGB ||
609        format == VK_FORMAT_R8G8_SRGB ||
610        format == VK_FORMAT_R64G64B64A64_SFLOAT ||
611        format == VK_FORMAT_R64_SFLOAT ||
612        format == VK_FORMAT_R64G64_SFLOAT ||
613        format == VK_FORMAT_R64G64B64_SFLOAT ||
614        format == VK_FORMAT_A2R10G10B10_SINT_PACK32 ||
615        format == VK_FORMAT_A2B10G10R10_SINT_PACK32 ||
616        format == VK_FORMAT_G8B8G8R8_422_UNORM ||
617        format == VK_FORMAT_B8G8R8G8_422_UNORM ||
618        format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
619        format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
620        format == VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM ||
621        format == VK_FORMAT_G8_B8R8_2PLANE_422_UNORM ||
622        format == VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM ||
623        format == VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM ||
624        format == VK_FORMAT_G16_B16R16_2PLANE_420_UNORM ||
625        format == VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM ||
626        format == VK_FORMAT_G16_B16R16_2PLANE_422_UNORM ||
627        format == VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM ||
628        format == VK_FORMAT_D16_UNORM_S8_UINT)
629       return PIPE_FORMAT_NONE;
630 
631    return vk_format_to_pipe_format(format);
632 }
633 
634 void
635 lvp_pipeline_destroy(struct lvp_device *device, struct lvp_pipeline *pipeline);
636 
637 void
638 queue_thread_noop(void *data, void *gdata, int thread_index);
639 
640 void
641 lvp_shader_optimize(nir_shader *nir);
642 void *
643 lvp_pipeline_compile_stage(struct lvp_pipeline *pipeline, nir_shader *nir);
644 bool
645 lvp_find_inlinable_uniforms(struct lvp_pipeline *pipeline, nir_shader *shader);
646 void
647 lvp_inline_uniforms(nir_shader *shader, const struct lvp_pipeline *pipeline, const uint32_t *uniform_values, uint32_t ubo);
648 void *
649 lvp_pipeline_compile(struct lvp_pipeline *pipeline, nir_shader *base_nir);
650 #ifdef __cplusplus
651 }
652 #endif
653