/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_format.h"

#include "nvk_buffer_view.h"
#include "nvk_entrypoints.h"
#include "nvk_image.h"
#include "nvk_physical_device.h"

#include "vk_enum_defines.h"
#include "vk_format.h"

#include "nvtypes.h"
#include "cl902d.h"
#include "cl9097.h"
#include "cl90c0.h"

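/* Reports whether a format can be used with atomic operations. Only the
 * 32-bit and 64-bit scalar integer formats qualify; every other format
 * falls through to false. The device argument is currently unused.
 */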
bool
nvk_format_supports_atomics(const struct nv_device_info *dev,
                            enum pipe_format p_format)
{
   switch (p_format) {
   case PIPE_FORMAT_R32_UINT:
   case PIPE_FORMAT_R32_SINT:
   case PIPE_FORMAT_R64_UINT:
   case PIPE_FORMAT_R64_SINT:
      return true;
   default:
      return false;
   }
}

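/* Vertex attribute format table, indexed directly by VkFormat. Each VA_FMT
 * entry records the NV9097 (3D class) component bit widths, whether red and
 * blue are swapped relative to the hardware layout, and the numerical type
 * programmed via SET_VERTEX_ATTRIBUTE_A. Formats without an entry are left
 * zero-initialized and treated as unsupported by nvk_get_va_format().
 */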
#define VA_FMT(vk_fmt, widths, swap_rb, type) \
   [VK_FORMAT_##vk_fmt] = \
   { NV9097_SET_VERTEX_ATTRIBUTE_A_COMPONENT_BIT_WIDTHS_##widths, \
     NV9097_SET_VERTEX_ATTRIBUTE_A_SWAP_R_AND_B_##swap_rb, \
     NV9097_SET_VERTEX_ATTRIBUTE_A_NUMERICAL_TYPE_NUM_##type }

static const struct nvk_va_format nvk_vf_formats[] = {
   VA_FMT(R8_UNORM,                    R8,               FALSE,   UNORM),
   VA_FMT(R8_SNORM,                    R8,               FALSE,   SNORM),
   VA_FMT(R8_USCALED,                  R8,               FALSE,   USCALED),
   VA_FMT(R8_SSCALED,                  R8,               FALSE,   SSCALED),
   VA_FMT(R8_UINT,                     R8,               FALSE,   UINT),
   VA_FMT(R8_SINT,                     R8,               FALSE,   SINT),

   VA_FMT(R8G8_UNORM,                  R8_G8,            FALSE,   UNORM),
   VA_FMT(R8G8_SNORM,                  R8_G8,            FALSE,   SNORM),
   VA_FMT(R8G8_USCALED,                R8_G8,            FALSE,   USCALED),
   VA_FMT(R8G8_SSCALED,                R8_G8,            FALSE,   SSCALED),
   VA_FMT(R8G8_UINT,                   R8_G8,            FALSE,   UINT),
   VA_FMT(R8G8_SINT,                   R8_G8,            FALSE,   SINT),

   VA_FMT(R8G8B8_UNORM,                R8_G8_B8,         FALSE,   UNORM),
   VA_FMT(R8G8B8_SNORM,                R8_G8_B8,         FALSE,   SNORM),
   VA_FMT(R8G8B8_USCALED,              R8_G8_B8,         FALSE,   USCALED),
   VA_FMT(R8G8B8_SSCALED,              R8_G8_B8,         FALSE,   SSCALED),
   VA_FMT(R8G8B8_UINT,                 R8_G8_B8,         FALSE,   UINT),
   VA_FMT(R8G8B8_SINT,                 R8_G8_B8,         FALSE,   SINT),

   VA_FMT(B8G8R8_UNORM,                R8_G8_B8,         TRUE,    UNORM),
   VA_FMT(B8G8R8_SNORM,                R8_G8_B8,         TRUE,    SNORM),
   VA_FMT(B8G8R8_USCALED,              R8_G8_B8,         TRUE,    USCALED),
   VA_FMT(B8G8R8_SSCALED,              R8_G8_B8,         TRUE,    SSCALED),
   VA_FMT(B8G8R8_UINT,                 R8_G8_B8,         TRUE,    UINT),
   VA_FMT(B8G8R8_SINT,                 R8_G8_B8,         TRUE,    SINT),

   VA_FMT(R8G8B8A8_UNORM,              R8_G8_B8_A8,      FALSE,   UNORM),
   VA_FMT(R8G8B8A8_SNORM,              R8_G8_B8_A8,      FALSE,   SNORM),
   VA_FMT(R8G8B8A8_USCALED,            R8_G8_B8_A8,      FALSE,   USCALED),
   VA_FMT(R8G8B8A8_SSCALED,            R8_G8_B8_A8,      FALSE,   SSCALED),
   VA_FMT(R8G8B8A8_UINT,               R8_G8_B8_A8,      FALSE,   UINT),
   VA_FMT(R8G8B8A8_SINT,               R8_G8_B8_A8,      FALSE,   SINT),

   VA_FMT(B8G8R8A8_UNORM,              R8_G8_B8_A8,      TRUE,    UNORM),
   VA_FMT(B8G8R8A8_SNORM,              R8_G8_B8_A8,      TRUE,    SNORM),
   VA_FMT(B8G8R8A8_USCALED,            R8_G8_B8_A8,      TRUE,    USCALED),
   VA_FMT(B8G8R8A8_SSCALED,            R8_G8_B8_A8,      TRUE,    SSCALED),
   VA_FMT(B8G8R8A8_UINT,               R8_G8_B8_A8,      TRUE,    UINT),
   VA_FMT(B8G8R8A8_SINT,               R8_G8_B8_A8,      TRUE,    SINT),

   VA_FMT(A8B8G8R8_UNORM_PACK32,       R8_G8_B8_A8,      FALSE,   UNORM),
   VA_FMT(A8B8G8R8_SNORM_PACK32,       R8_G8_B8_A8,      FALSE,   SNORM),
   VA_FMT(A8B8G8R8_USCALED_PACK32,     R8_G8_B8_A8,      FALSE,   USCALED),
   VA_FMT(A8B8G8R8_SSCALED_PACK32,     R8_G8_B8_A8,      FALSE,   SSCALED),
   VA_FMT(A8B8G8R8_UINT_PACK32,        R8_G8_B8_A8,      FALSE,   UINT),
   VA_FMT(A8B8G8R8_SINT_PACK32,        R8_G8_B8_A8,      FALSE,   SINT),

   VA_FMT(A2R10G10B10_UNORM_PACK32,    A2B10G10R10,      TRUE,    UNORM),
   VA_FMT(A2R10G10B10_SNORM_PACK32,    A2B10G10R10,      TRUE,    SNORM),
   VA_FMT(A2R10G10B10_USCALED_PACK32,  A2B10G10R10,      TRUE,    USCALED),
   VA_FMT(A2R10G10B10_SSCALED_PACK32,  A2B10G10R10,      TRUE,    SSCALED),
   VA_FMT(A2R10G10B10_UINT_PACK32,     A2B10G10R10,      TRUE,    UINT),
   VA_FMT(A2R10G10B10_SINT_PACK32,     A2B10G10R10,      TRUE,    SINT),

   VA_FMT(A2B10G10R10_UNORM_PACK32,    A2B10G10R10,      FALSE,   UNORM),
   VA_FMT(A2B10G10R10_SNORM_PACK32,    A2B10G10R10,      FALSE,   SNORM),
   VA_FMT(A2B10G10R10_USCALED_PACK32,  A2B10G10R10,      FALSE,   USCALED),
   VA_FMT(A2B10G10R10_SSCALED_PACK32,  A2B10G10R10,      FALSE,   SSCALED),
   VA_FMT(A2B10G10R10_UINT_PACK32,     A2B10G10R10,      FALSE,   UINT),
   VA_FMT(A2B10G10R10_SINT_PACK32,     A2B10G10R10,      FALSE,   SINT),

   VA_FMT(B10G11R11_UFLOAT_PACK32,     B10G11R11,        FALSE,   FLOAT),

   VA_FMT(R16_UNORM,                   R16,              FALSE,   UNORM),
   VA_FMT(R16_SNORM,                   R16,              FALSE,   SNORM),
   VA_FMT(R16_USCALED,                 R16,              FALSE,   USCALED),
   VA_FMT(R16_SSCALED,                 R16,              FALSE,   SSCALED),
   VA_FMT(R16_UINT,                    R16,              FALSE,   UINT),
   VA_FMT(R16_SINT,                    R16,              FALSE,   SINT),
   VA_FMT(R16_SFLOAT,                  R16,              FALSE,   FLOAT),

   VA_FMT(R16G16_UNORM,                R16_G16,          FALSE,   UNORM),
   VA_FMT(R16G16_SNORM,                R16_G16,          FALSE,   SNORM),
   VA_FMT(R16G16_USCALED,              R16_G16,          FALSE,   USCALED),
   VA_FMT(R16G16_SSCALED,              R16_G16,          FALSE,   SSCALED),
   VA_FMT(R16G16_UINT,                 R16_G16,          FALSE,   UINT),
   VA_FMT(R16G16_SINT,                 R16_G16,          FALSE,   SINT),
   VA_FMT(R16G16_SFLOAT,               R16_G16,          FALSE,   FLOAT),

   VA_FMT(R16G16B16_UNORM,             R16_G16_B16,      FALSE,   UNORM),
   VA_FMT(R16G16B16_SNORM,             R16_G16_B16,      FALSE,   SNORM),
   VA_FMT(R16G16B16_USCALED,           R16_G16_B16,      FALSE,   USCALED),
   VA_FMT(R16G16B16_SSCALED,           R16_G16_B16,      FALSE,   SSCALED),
   VA_FMT(R16G16B16_UINT,              R16_G16_B16,      FALSE,   UINT),
   VA_FMT(R16G16B16_SINT,              R16_G16_B16,      FALSE,   SINT),
   VA_FMT(R16G16B16_SFLOAT,            R16_G16_B16,      FALSE,   FLOAT),

   VA_FMT(R16G16B16A16_UNORM,          R16_G16_B16_A16,  FALSE,   UNORM),
   VA_FMT(R16G16B16A16_SNORM,          R16_G16_B16_A16,  FALSE,   SNORM),
   VA_FMT(R16G16B16A16_USCALED,        R16_G16_B16_A16,  FALSE,   USCALED),
   VA_FMT(R16G16B16A16_SSCALED,        R16_G16_B16_A16,  FALSE,   SSCALED),
   VA_FMT(R16G16B16A16_UINT,           R16_G16_B16_A16,  FALSE,   UINT),
   VA_FMT(R16G16B16A16_SINT,           R16_G16_B16_A16,  FALSE,   SINT),
   VA_FMT(R16G16B16A16_SFLOAT,         R16_G16_B16_A16,  FALSE,   FLOAT),

   VA_FMT(R32_UINT,                    R32,              FALSE,   UINT),
   VA_FMT(R32_SINT,                    R32,              FALSE,   SINT),
   VA_FMT(R32_SFLOAT,                  R32,              FALSE,   FLOAT),

   VA_FMT(R32G32_UINT,                 R32_G32,          FALSE,   UINT),
   VA_FMT(R32G32_SINT,                 R32_G32,          FALSE,   SINT),
   VA_FMT(R32G32_SFLOAT,               R32_G32,          FALSE,   FLOAT),

   VA_FMT(R32G32B32_UINT,              R32_G32_B32,      FALSE,   UINT),
   VA_FMT(R32G32B32_SINT,              R32_G32_B32,      FALSE,   SINT),
   VA_FMT(R32G32B32_SFLOAT,            R32_G32_B32,      FALSE,   FLOAT),

   VA_FMT(R32G32B32A32_UINT,           R32_G32_B32_A32,  FALSE,   UINT),
   VA_FMT(R32G32B32A32_SINT,           R32_G32_B32_A32,  FALSE,   SINT),
   VA_FMT(R32G32B32A32_SFLOAT,         R32_G32_B32_A32,  FALSE,   FLOAT),
};

#undef VA_FMT

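/* Looks up the vertex attribute encoding for a VkFormat. Returns NULL when
 * the format is beyond the end of the table or has no entry (a zero
 * bit_widths field marks an unused slot in nvk_vf_formats).
 */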
const struct nvk_va_format *
nvk_get_va_format(const struct nvk_physical_device *pdev, VkFormat format)
{
   if (format >= ARRAY_SIZE(nvk_vf_formats))
      return NULL;

   if (nvk_vf_formats[format].bit_widths == 0)
      return NULL;

   return &nvk_vf_formats[format];
}

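/* vkGetPhysicalDeviceFormatProperties2 entrypoint: queries linear-tiling,
 * optimal-tiling, and buffer features for the format, then narrows the
 * 64-bit VkFormatFeatureFlags2 results to the legacy 32-bit flags for the
 * core VkFormatProperties struct.
 */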
VKAPI_ATTR void VKAPI_CALL
nvk_GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,
                                       VkFormat format,
                                       VkFormatProperties2 *pFormatProperties)
{
   VK_FROM_HANDLE(nvk_physical_device, pdevice, physicalDevice);

   VkFormatFeatureFlags2 linear2, optimal2, buffer2;
   linear2 = nvk_get_image_format_features(pdevice, format,
                                           VK_IMAGE_TILING_LINEAR, 0);
   optimal2 = nvk_get_image_format_features(pdevice, format,
                                            VK_IMAGE_TILING_OPTIMAL, 0);
   buffer2 = nvk_get_buffer_format_features(pdevice, format);

   pFormatProperties->formatProperties = (VkFormatProperties) {
      .linearTilingFeatures = vk_format_features2_to_features(linear2),
      .optimalTilingFeatures = vk_format_features2_to_features(optimal2),
      .bufferFeatures = vk_format_features2_to_features(buffer2),
   };

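   /* Walk the caller's pNext chain: VkFormatProperties3 receives the full
    * 64-bit feature flags, the DRM format modifier property lists are filled
    * by the shared helper, and anything else is ignored.
    */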
   vk_foreach_struct(ext, pFormatProperties->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3: {
         VkFormatProperties3 *p = (void *)ext;
         p->linearTilingFeatures = linear2;
         p->optimalTilingFeatures = optimal2;
         p->bufferFeatures = buffer2;
         break;
      }

      case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
      case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT:
         nvk_get_drm_format_modifier_properties_list(pdevice, format, ext);
         break;

      default:
         vk_debug_ignored_stype(ext->sType);
         break;
      }
   }
}