/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "nir.h"
#include "nir_builder.h"
#include "lvp_lower_vulkan_resource.h"

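/* Filter callback for nir_shader_lower_instructions(): match the Vulkan
 * resource index/descriptor intrinsics, the image_deref_* intrinsics and
 * texture instructions whose set/binding needs to be resolved to lavapipe's
 * flat per-stage indices.
 */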
static bool
lower_vulkan_resource_index(const nir_instr *instr, const void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
      case nir_intrinsic_vulkan_resource_reindex:
      case nir_intrinsic_load_vulkan_descriptor:
      case nir_intrinsic_get_ssbo_size:
      case nir_intrinsic_image_deref_sparse_load:
      case nir_intrinsic_image_deref_load:
      case nir_intrinsic_image_deref_store:
      case nir_intrinsic_image_deref_atomic_add:
      case nir_intrinsic_image_deref_atomic_imin:
      case nir_intrinsic_image_deref_atomic_umin:
      case nir_intrinsic_image_deref_atomic_imax:
      case nir_intrinsic_image_deref_atomic_umax:
      case nir_intrinsic_image_deref_atomic_and:
      case nir_intrinsic_image_deref_atomic_or:
      case nir_intrinsic_image_deref_atomic_xor:
      case nir_intrinsic_image_deref_atomic_exchange:
      case nir_intrinsic_image_deref_atomic_comp_swap:
      case nir_intrinsic_image_deref_atomic_fadd:
      case nir_intrinsic_image_deref_size:
      case nir_intrinsic_image_deref_samples:
         return true;
      default:
         return false;
      }
   }
   if (instr->type == nir_instr_type_tex) {
      return true;
   }
   return false;
}

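/* Filter callback: match load_deref instructions that read UBO memory.
 * lower_block_instr() below then rewrites the ones that come from inline
 * uniform blocks.
 */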
static bool
lower_uniform_block_access(const nir_instr *instr, const void *data_cb)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;

   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   if (intrin->intrinsic != nir_intrinsic_load_deref)
      return false;
   nir_deref_instr *deref = nir_instr_as_deref(intrin->src[0].ssa->parent_instr);
   return deref->modes == nir_var_mem_ubo;
}

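/* Rewrite a UBO load_deref that targets an inline uniform block: add the
 * accumulated per-stage uniform-block offset (plus any push-constant space)
 * to the deref's offset so the load reads from the combined buffer that
 * lavapipe packs inline uniform data into.
 */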
static nir_ssa_def *
lower_block_instr(nir_builder *b, nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_binding nb = nir_chase_binding(intrin->src[0]);
   const struct lvp_pipeline_layout *layout = data_cb;
   const struct lvp_descriptor_set_binding_layout *binding =
      get_binding_layout(layout, nb.desc_set, nb.binding);
   if (binding->type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return NULL;
   if (!binding->array_size)
      return NIR_LOWER_INSTR_PROGRESS_REPLACE;

   assert(intrin->src[0].ssa->num_components == 2);
   unsigned value = 0;
   for (unsigned s = 0; s < nb.desc_set; s++)
      value += get_set_layout(layout, s)->stage[b->shader->info.stage].uniform_block_size;
   if (layout->push_constant_stages & BITFIELD_BIT(b->shader->info.stage))
      value += layout->push_constant_size;
   value += binding->stage[b->shader->info.stage].uniform_block_offset;

   b->cursor = nir_before_instr(instr);
   nir_ssa_def *offset = nir_imm_ivec2(b, 0, value);
   nir_ssa_def *added = nir_iadd(b, intrin->src[0].ssa, offset);
   nir_deref_instr *deref = nir_instr_as_deref(intrin->src[0].ssa->parent_instr);
   nir_deref_instr *cast = nir_build_deref_cast(b, added, deref->modes, deref->type, 0);
   nir_instr_rewrite_src_ssa(instr, &intrin->src[0], &cast->dest.ssa);
   return NIR_LOWER_INSTR_PROGRESS;
}

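/* Lower vulkan_resource_index to a vec2(index, 0) in
 * nir_addr_format_32bit_index_offset form, where the index is the flat
 * per-stage UBO or SSBO slot computed from the pipeline layout.  UBO
 * indices are biased by 1 because ubo0 carries push constants and inline
 * uniform block data (see the ubo0 comment in the function).
 */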
static nir_ssa_def *lower_vri_intrin_vri(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   unsigned desc_set_idx = nir_intrinsic_desc_set(intrin);
   unsigned binding_idx = nir_intrinsic_binding(intrin);
   const struct lvp_pipeline_layout *layout = data_cb;
   const struct lvp_descriptor_set_binding_layout *binding =
      get_binding_layout(data_cb, desc_set_idx, binding_idx);
   int value = 0;
   bool is_ubo = (binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
                  binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);

   /* always load inline uniform blocks from ubo0 */
   if (binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return nir_imm_ivec2(b, 0, 0);

   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (!layout->vk.set_layouts[s])
         continue;
      if (is_ubo)
         value += get_set_layout(layout, s)->stage[b->shader->info.stage].const_buffer_count;
      else
         value += get_set_layout(layout, s)->stage[b->shader->info.stage].shader_buffer_count;
   }
   if (is_ubo)
      value += binding->stage[b->shader->info.stage].const_buffer_index + 1;
   else
      value += binding->stage[b->shader->info.stage].shader_buffer_index;

   /* The SSA size for indices is the same as for pointers.  We use
    * nir_addr_format_32bit_index_offset so we need a vec2.  We don't need all
    * that data so just stuff a 0 in the second component.
    */
   if (nir_src_is_const(intrin->src[0])) {
      value += nir_src_comp_as_int(intrin->src[0], 0);
      return nir_imm_ivec2(b, value, 0);
   } else
      return nir_vec2(b, nir_iadd_imm(b, intrin->src[0].ssa, value),
                         nir_imm_int(b, 0));
}

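/* Lower vulkan_resource_reindex: the index is already flat at this point,
 * so just add the delta and keep 0 in the second component.
 */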
static nir_ssa_def *lower_vri_intrin_vrri(struct nir_builder *b,
                                          nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *old_index = nir_ssa_for_src(b, intrin->src[0], 1);
   nir_ssa_def *delta = nir_ssa_for_src(b, intrin->src[1], 1);
   return nir_vec2(b, nir_iadd(b, old_index, delta),
                      nir_imm_int(b, 0));
}

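/* Lower load_vulkan_descriptor: the descriptor is simply the flat index, so
 * pass it through as vec2(index, 0).
 */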
static nir_ssa_def *lower_vri_intrin_lvd(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
   return nir_vec2(b, index, nir_imm_int(b, 0));
}

/*
 * Resolve the texture or sampler deref on a texture instruction to a flat
 * per-stage index, and record the units it can touch in the 'used' bitset.
 * Note that 'used' is expected to be already initialized, i.e. this
 * function does not zero out the bitset before setting any bits.
 */
static void
lower_vri_instr_tex_deref(nir_tex_instr *tex,
                          nir_tex_src_type deref_src_type,
                          gl_shader_stage stage,
                          struct lvp_pipeline_layout *layout,
                          BITSET_WORD used[], // textures or samplers
                          size_t used_size)   // used[] size, in bits
{
   int deref_src_idx = nir_tex_instr_src_index(tex, deref_src_type);

   if (deref_src_idx < 0)
      return;

   nir_deref_instr *deref_instr = nir_src_as_deref(tex->src[deref_src_idx].src);
   nir_variable *var = nir_deref_instr_get_variable(deref_instr);
   unsigned desc_set_idx = var->data.descriptor_set;
   unsigned binding_idx = var->data.binding;
   int value = 0;

   const struct lvp_descriptor_set_binding_layout *binding =
      get_binding_layout(layout, desc_set_idx, binding_idx);
   nir_tex_instr_remove_src(tex, deref_src_idx);
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (!layout->vk.set_layouts[s])
         continue;
      if (deref_src_type == nir_tex_src_sampler_deref)
         value += get_set_layout(layout, s)->stage[stage].sampler_count;
      else
         value += get_set_layout(layout, s)->stage[stage].sampler_view_count;
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      value += binding->stage[stage].sampler_index;
   else
      value += binding->stage[stage].sampler_view_index;

   if (deref_instr->deref_type == nir_deref_type_array) {
      if (nir_src_is_const(deref_instr->arr.index))
         value += nir_src_as_uint(deref_instr->arr.index);
      else {
         if (deref_src_type == nir_tex_src_sampler_deref)
            nir_tex_instr_add_src(tex, nir_tex_src_sampler_offset, deref_instr->arr.index);
         else
            nir_tex_instr_add_src(tex, nir_tex_src_texture_offset, deref_instr->arr.index);
      }
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      tex->sampler_index = value;
   else
      tex->texture_index = value;

   if (deref_instr->deref_type == nir_deref_type_array) {
      assert(glsl_type_is_array(var->type));
      assert(value >= 0);
      assert(value < used_size);
      if (nir_src_is_const(deref_instr->arr.index)) {
         BITSET_SET(used, value);
      } else {
         unsigned size = glsl_get_aoa_size(var->type);
         assert(value + size <= used_size);
         /* BITSET_SET_RANGE is inclusive of both endpoints */
         BITSET_SET_RANGE(used, value, value + size - 1);
      }
   } else {
      assert(value < used_size);
      BITSET_SET(used, value);
   }
}

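/* Lower both the sampler and texture derefs on a tex instruction and update
 * the shader-info bitsets of samplers/textures used.
 */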
static void lower_vri_instr_tex(struct nir_builder *b,
                                nir_tex_instr *tex, void *data_cb)
{
   struct lvp_pipeline_layout *layout = data_cb;
   lower_vri_instr_tex_deref(tex, nir_tex_src_sampler_deref,
                             b->shader->info.stage, layout,
                             b->shader->info.samplers_used,
                             BITSET_SIZE(b->shader->info.samplers_used));

   lower_vri_instr_tex_deref(tex, nir_tex_src_texture_deref,
                             b->shader->info.stage, layout,
                             b->shader->info.textures_used,
                             BITSET_SIZE(b->shader->info.textures_used));
}

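/* Compute the flat per-stage image index for an image_deref_* intrinsic and
 * mark the image slot(s) it can touch in shader_info::images_used.  The
 * image variable's binding itself is rewritten in lvp_lower_pipeline_layout().
 */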
static void lower_vri_intrin_image(struct nir_builder *b,
                                   nir_intrinsic_instr *intrin, void *data_cb)
{
   const struct lvp_pipeline_layout *layout = data_cb;
   gl_shader_stage stage = b->shader->info.stage;

   nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
   nir_variable *var = nir_deref_instr_get_variable(deref);
   unsigned desc_set_idx = var->data.descriptor_set;
   unsigned binding_idx = var->data.binding;
   const struct lvp_descriptor_set_binding_layout *binding =
      get_binding_layout(layout, desc_set_idx, binding_idx);

   int value = 0;
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (!layout->vk.set_layouts[s])
         continue;
      value += get_set_layout(layout, s)->stage[stage].image_count;
   }
   value += binding->stage[stage].image_index;

   if (deref->deref_type == nir_deref_type_array) {
      assert(glsl_type_is_array(var->type));
      assert(value >= 0);
      if (nir_src_is_const(deref->arr.index)) {
         value += nir_src_as_uint(deref->arr.index);
         BITSET_SET(b->shader->info.images_used, value);
      } else {
         unsigned size = glsl_get_aoa_size(var->type);
         BITSET_SET_RANGE(b->shader->info.images_used,
                          value, value + size - 1);
      }
   } else {
      BITSET_SET(b->shader->info.images_used, value);
   }
}

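/* Dispatch callback for nir_shader_lower_instructions(): route each matched
 * instruction to the appropriate lowering helper above.
 */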
static nir_ssa_def *lower_vri_instr(struct nir_builder *b,
                                    nir_instr *instr, void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
         return lower_vri_intrin_vri(b, instr, data_cb);

      case nir_intrinsic_vulkan_resource_reindex:
         return lower_vri_intrin_vrri(b, instr, data_cb);

      case nir_intrinsic_load_vulkan_descriptor:
         return lower_vri_intrin_lvd(b, instr, data_cb);

      case nir_intrinsic_get_ssbo_size: {
         /* The result of the load_vulkan_descriptor is a vec2(index, offset)
          * but we only want the index in get_ssbo_size.
          */
         b->cursor = nir_before_instr(&intrin->instr);
         nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
         nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
                               nir_src_for_ssa(index));
         return NULL;
      }

      case nir_intrinsic_image_deref_sparse_load:
      case nir_intrinsic_image_deref_load:
      case nir_intrinsic_image_deref_store:
      case nir_intrinsic_image_deref_atomic_add:
      case nir_intrinsic_image_deref_atomic_imin:
      case nir_intrinsic_image_deref_atomic_umin:
      case nir_intrinsic_image_deref_atomic_imax:
      case nir_intrinsic_image_deref_atomic_umax:
      case nir_intrinsic_image_deref_atomic_and:
      case nir_intrinsic_image_deref_atomic_or:
      case nir_intrinsic_image_deref_atomic_xor:
      case nir_intrinsic_image_deref_atomic_exchange:
      case nir_intrinsic_image_deref_atomic_comp_swap:
      case nir_intrinsic_image_deref_atomic_fadd:
      case nir_intrinsic_image_deref_size:
      case nir_intrinsic_image_deref_samples:
         lower_vri_intrin_image(b, intrin, data_cb);
         return NULL;

      default:
         return NULL;
      }
   }
   if (instr->type == nir_instr_type_tex)
      lower_vri_instr_tex(b, nir_instr_as_tex(instr), data_cb);
   return NULL;
}

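/* Entry point: lower inline uniform block loads and Vulkan resource
 * intrinsics, then rewrite sampler/texture/image variables so their
 * descriptor set is 0 and their binding is the flat per-stage index.
 */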
void lvp_lower_pipeline_layout(const struct lvp_device *device,
                               struct lvp_pipeline_layout *layout,
                               nir_shader *shader)
{
   nir_shader_lower_instructions(shader, lower_uniform_block_access, lower_block_instr, layout);
   nir_shader_lower_instructions(shader, lower_vulkan_resource_index, lower_vri_instr, layout);
   nir_foreach_variable_with_modes(var, shader, nir_var_uniform |
                                                nir_var_image) {
      const struct glsl_type *type = var->type;
      enum glsl_base_type base_type =
         glsl_get_base_type(glsl_without_array(type));
      unsigned desc_set_idx = var->data.descriptor_set;
      unsigned binding_idx = var->data.binding;
      const struct lvp_descriptor_set_binding_layout *binding =
         get_binding_layout(layout, desc_set_idx, binding_idx);
      int value = 0;
      var->data.descriptor_set = 0;
      if (base_type == GLSL_TYPE_SAMPLER || base_type == GLSL_TYPE_TEXTURE) {
         if (binding->type == VK_DESCRIPTOR_TYPE_SAMPLER) {
            for (unsigned s = 0; s < desc_set_idx; s++) {
               if (!layout->vk.set_layouts[s])
                  continue;
               value += get_set_layout(layout, s)->stage[shader->info.stage].sampler_count;
            }
            value += binding->stage[shader->info.stage].sampler_index;
         } else {
            for (unsigned s = 0; s < desc_set_idx; s++) {
               if (!layout->vk.set_layouts[s])
                  continue;
               value += get_set_layout(layout, s)->stage[shader->info.stage].sampler_view_count;
            }
            value += binding->stage[shader->info.stage].sampler_view_index;
         }
         var->data.binding = value;
      }
      if (base_type == GLSL_TYPE_IMAGE) {
         var->data.descriptor_set = 0;
         for (unsigned s = 0; s < desc_set_idx; s++) {
            if (!layout->vk.set_layouts[s])
               continue;
            value += get_set_layout(layout, s)->stage[shader->info.stage].image_count;
         }
         value += binding->stage[shader->info.stage].image_index;
         var->data.binding = value;
      }
   }
}