/*
 * Copyright © 2022 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "anv_nir.h"

#include "compiler/brw_nir.h"

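/* Returns the descriptor set layout in the pipeline layout that was created
 * with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, or NULL if
 * there is none. Vulkan allows at most one push descriptor set per pipeline
 * layout, so returning the first match is sufficient. If set_idx is
 * non-NULL, it is written with the index of the set in the layout.
 */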
const struct anv_descriptor_set_layout *
anv_pipeline_layout_get_push_set(const struct anv_pipeline_sets_layout *layout,
                                 uint8_t *set_idx)
{
   for (unsigned s = 0; s < ARRAY_SIZE(layout->set); s++) {
      struct anv_descriptor_set_layout *set_layout = layout->set[s].layout;

      if (!set_layout ||
          !(set_layout->flags &
            VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR))
         continue;

      if (set_idx)
         *set_idx = s;

      return set_layout;
   }

   return NULL;
}

/* This function returns a bitfield of the descriptors used in the push
 * descriptor set. It must be called before anv_nir_apply_pipeline_layout(),
 * because the descriptor set/binding information it relies on is lost once
 * the pipeline layout has been applied.
 */
uint32_t
anv_nir_compute_used_push_descriptors(nir_shader *shader,
                                      const struct anv_pipeline_sets_layout *layout)
{
   uint8_t push_set;
   const struct anv_descriptor_set_layout *push_set_layout =
      anv_pipeline_layout_get_push_set(layout, &push_set);
   if (push_set_layout == NULL)
      return 0;

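   /* First pass: walk the shader variables (samplers, images, UBOs, SSBOs)
    * and record any that are bound in the push descriptor set.
    */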
   uint32_t used_push_bindings = 0;
   nir_foreach_variable_with_modes(var, shader,
                                   nir_var_uniform |
                                   nir_var_image |
                                   nir_var_mem_ubo |
                                   nir_var_mem_ssbo) {
      if (var->data.descriptor_set == push_set) {
         uint32_t desc_idx =
            push_set_layout->binding[var->data.binding].descriptor_index;
         assert(desc_idx < MAX_PUSH_DESCRIPTORS);
         used_push_bindings |= BITFIELD_BIT(desc_idx);
      }
   }

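   /* Second pass: walk the instructions for vulkan_resource_index
    * intrinsics, which carry the descriptor set and binding of each buffer
    * access.
    */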
   nir_foreach_function_impl(impl, shader) {
      nir_foreach_block(block, impl) {
         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
            if (intrin->intrinsic != nir_intrinsic_vulkan_resource_index)
               continue;

            uint8_t set = nir_intrinsic_desc_set(intrin);
            if (set != push_set)
               continue;

            uint32_t binding = nir_intrinsic_binding(intrin);
            uint32_t desc_idx =
               push_set_layout->binding[binding].descriptor_index;
            assert(desc_idx < MAX_PUSH_DESCRIPTORS);

            used_push_bindings |= BITFIELD_BIT(desc_idx);
         }
      }
   }

   return used_push_bindings;
}
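
/* A sketch of how a caller might consume the bitfield (hypothetical, not an
 * actual call site in this file):
 *
 *    uint32_t used = anv_nir_compute_used_push_descriptors(nir, layout);
 *    for (uint32_t i = 0; i < MAX_PUSH_DESCRIPTORS; i++) {
 *       if (used & BITFIELD_BIT(i)) {
 *          ... descriptor i is referenced by the shader ...
 *       }
 *    }
 */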

/* This function checks whether the shader accesses the push descriptor
 * set's descriptor buffer. It must be called after
 * anv_nir_compute_push_layout().
 */
bool
anv_nir_loads_push_desc_buffer(nir_shader *nir,
                               const struct anv_pipeline_sets_layout *layout,
                               const struct anv_pipeline_bind_map *bind_map)
{
   uint8_t push_set;
   const struct anv_descriptor_set_layout *push_set_layout =
      anv_pipeline_layout_get_push_set(layout, &push_set);
   if (push_set_layout == NULL)
      return false;

   nir_foreach_function_impl(impl, nir) {
      nir_foreach_block(block, impl) {
         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
            if (intrin->intrinsic != nir_intrinsic_load_ubo)
               continue;

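            /* Resolve the binding table index of the UBO surface;
             * UINT32_MAX means the index isn't a compile-time constant and
             * the load is skipped.
             */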
            const unsigned bt_idx =
               brw_nir_ubo_surface_index_get_bti(intrin->src[0]);
            if (bt_idx == UINT32_MAX)
               continue;

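            /* Entries with set == ANV_DESCRIPTOR_SET_DESCRIPTORS are the
             * internal descriptor buffers; index identifies which set's
             * buffer the surface points at.
             */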
            const struct anv_pipeline_binding *binding =
               &bind_map->surface_to_descriptor[bt_idx];
            if (binding->set == ANV_DESCRIPTOR_SET_DESCRIPTORS &&
                binding->index == push_set) {
               return true;
            }
         }
      }
   }

   return false;
}

/* This function computes a bitfield of all the UBO bindings in the push
 * descriptor set that are fully promoted to push constants. If a binding's
 * bit is set in the field, the corresponding binding table entry will not
 * be accessed by the shader. This function must be called after
 * anv_nir_compute_push_layout().
 */
uint32_t
anv_nir_push_desc_ubo_fully_promoted(nir_shader *nir,
                                     const struct anv_pipeline_sets_layout *layout,
                                     const struct anv_pipeline_bind_map *bind_map)
{
   uint8_t push_set;
   const struct anv_descriptor_set_layout *push_set_layout =
      anv_pipeline_layout_get_push_set(layout, &push_set);
   if (push_set_layout == NULL)
      return 0;

   /* Start by assuming every UBO binding can be promoted. */
   uint32_t ubos_fully_promoted = 0;
   for (uint32_t b = 0; b < push_set_layout->binding_count; b++) {
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &push_set_layout->binding[b];
      if (bind_layout->type == -1)
         continue;

      assert(bind_layout->descriptor_index < MAX_PUSH_DESCRIPTORS);
      if (bind_layout->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
         ubos_fully_promoted |= BITFIELD_BIT(bind_layout->descriptor_index);
   }

   /* For each load_ubo intrinsic, if the descriptor index or the offset is
    * not constant, the binding cannot be promoted to push constants.
    * Otherwise check the offset + size of the load against the push ranges.
    */
   nir_foreach_function_impl(impl, nir) {
      nir_foreach_block(block, impl) {
         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
            if (intrin->intrinsic != nir_intrinsic_load_ubo)
               continue;

            /* Don't check load_ubo()s that read from the descriptor
             * buffers; only loads going through a resource_intel
             * intrinsic are considered here.
             */
            nir_intrinsic_instr *resource =
               intrin->src[0].ssa->parent_instr->type == nir_instr_type_intrinsic ?
               nir_instr_as_intrinsic(intrin->src[0].ssa->parent_instr) : NULL;
            if (resource == NULL || resource->intrinsic != nir_intrinsic_resource_intel)
               continue;

            /* Skip load_ubo()s that don't read from the push descriptor
             * set.
             */
            if (nir_intrinsic_desc_set(resource) != push_set)
               continue;

            uint32_t binding = nir_intrinsic_binding(resource);

            /* If the binding is accessed with a non-constant index
             * (indirect indexing into the binding's array), no push
             * promotion is possible for any element of the binding.
             */
            if (!nir_src_is_const(resource->src[1])) {
               for (uint32_t i = 0; i < push_set_layout->binding[binding].array_size; i++) {
                  ubos_fully_promoted &=
                     ~BITFIELD_BIT(push_set_layout->binding[binding].descriptor_index + i);
               }
               continue;
            }

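            /* resource_intel's src[1] holds the binding table index of the
             * surface; it is constant at this point, so it can be mapped
             * back to the pipeline binding it came from.
             */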
            const nir_const_value *const_bt_id =
               nir_src_as_const_value(resource->src[1]);
            uint32_t bt_id = const_bt_id[0].u32;

            const struct anv_pipeline_binding *pipe_bind =
               &bind_map->surface_to_descriptor[bt_id];

            const uint32_t desc_idx =
               push_set_layout->binding[binding].descriptor_index;

            /* If the load offset is dynamic, we can't tell whether the
             * load was promoted, so assume it wasn't.
             */
            const nir_const_value *const_load_offset =
               nir_src_as_const_value(intrin->src[1]);
            if (const_load_offset == NULL) {
               ubos_fully_promoted &= ~BITFIELD_BIT(desc_idx);
               continue;
            }

            /* Check if the load was promoted to a push constant. */
            const unsigned load_offset = const_load_offset[0].u32;
            const int load_bytes = nir_intrinsic_dest_components(intrin) *
                                   (intrin->def.bit_size / 8);

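            /* Push range starts/lengths are in units of 32-byte registers;
             * the load counts as promoted only if [load_offset,
             * load_offset + load_bytes) falls entirely within one range
             * for this descriptor.
             */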
            bool promoted = false;
            for (unsigned i = 0; i < ARRAY_SIZE(bind_map->push_ranges); i++) {
               if (bind_map->push_ranges[i].set == pipe_bind->set &&
                   bind_map->push_ranges[i].index == desc_idx &&
                   bind_map->push_ranges[i].start * 32 <= load_offset &&
                   (bind_map->push_ranges[i].start +
                    bind_map->push_ranges[i].length) * 32 >=
                   (load_offset + load_bytes)) {
                  promoted = true;
                  break;
               }
            }

            if (!promoted)
               ubos_fully_promoted &= ~BITFIELD_BIT(desc_idx);
         }
      }
   }

   return ubos_fully_promoted;
}