/*
 * Copyright © 2022 Imagination Technologies Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include <assert.h>
#include <stdint.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "nir/nir_search_helpers.h"
#include "rogue_nir.h"
#include "rogue_nir_helpers.h"

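/* Rewrites a vulkan_resource_index intrinsic into an immediate
 * (descriptor set, binding, 0) triple; the pipeline layout argument is not
 * consulted by this lowering.
 */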
static void lower_vulkan_resource_index(nir_builder *b,
                                        nir_intrinsic_instr *intr,
                                        void *pipeline_layout)
{
   unsigned desc_set = nir_intrinsic_desc_set(intr);
   unsigned binding = nir_intrinsic_binding(intr);

   nir_ssa_def *def = nir_vec3(b,
                               nir_imm_int(b, desc_set),
                               nir_imm_int(b, binding),
                               nir_imm_int(b, 0));
   nir_ssa_def_rewrite_uses(&intr->dest.ssa, def);
   nir_instr_remove(&intr->instr);
}

static void lower_load_vulkan_descriptor(nir_builder *b,
                                         nir_intrinsic_instr *intr)
{
   /* Loading the descriptor happens as part of the load/store instruction so
    * this is a no-op.
    */

   nir_ssa_def_rewrite_uses(&intr->dest.ssa, intr->src[0].ssa);
   nir_instr_remove(&intr->instr);
}

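/* Splits a vector load_ubo into one single-component load per channel and
 * recombines the results with nir_vec, so later stages only ever see scalar
 * UBO loads.
 */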
static void lower_load_ubo_to_scalar(nir_builder *b, nir_intrinsic_instr *intr)
{
   /* Scalarize the load_ubo. */
   b->cursor = nir_before_instr(&intr->instr);

   assert(intr->dest.is_ssa);
   assert(intr->num_components > 1);

   nir_ssa_def *loads[NIR_MAX_VEC_COMPONENTS];

   for (uint8_t i = 0; i < intr->num_components; i++) {
      size_t scaled_range = nir_intrinsic_range(intr) / intr->num_components;
      nir_intrinsic_instr *chan_intr =
         nir_intrinsic_instr_create(b->shader, intr->intrinsic);
      nir_ssa_dest_init(&chan_intr->instr,
                        &chan_intr->dest,
                        1,
                        intr->dest.ssa.bit_size,
                        NULL);
      chan_intr->num_components = 1;

      nir_intrinsic_set_access(chan_intr, nir_intrinsic_access(intr));
      nir_intrinsic_set_align_mul(chan_intr, nir_intrinsic_align_mul(intr));
      nir_intrinsic_set_align_offset(chan_intr,
                                     nir_intrinsic_align_offset(intr));
      nir_intrinsic_set_range_base(chan_intr,
                                   nir_intrinsic_range_base(intr) +
                                      (i * intr->num_components));
      nir_intrinsic_set_range(chan_intr, scaled_range);

      /* Base (desc_set, binding). */
      nir_src_copy(&chan_intr->src[0], &intr->src[0]);

      /* Offset (unused). */
      chan_intr->src[1] = nir_src_for_ssa(nir_imm_int(b, 0));

      nir_builder_instr_insert(b, &chan_intr->instr);

      loads[i] = &chan_intr->dest.ssa;
   }

   nir_ssa_def_rewrite_uses(&intr->dest.ssa,
                            nir_vec(b, loads, intr->num_components));
   nir_instr_remove(&intr->instr);
}

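/* Dispatches the intrinsics handled by this pass; returns true if the
 * instruction was lowered.
 */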
static bool
lower_intrinsic(nir_builder *b, nir_intrinsic_instr *instr, void *layout)
{
   switch (instr->intrinsic) {
   case nir_intrinsic_load_vulkan_descriptor:
      lower_load_vulkan_descriptor(b, instr);
      return true;

   case nir_intrinsic_vulkan_resource_index:
      lower_vulkan_resource_index(b, instr, layout);
      return true;

   case nir_intrinsic_load_ubo:
      lower_load_ubo_to_scalar(b, instr);
      return true;

   default:
      break;
   }

   return false;
}

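/* Walks every instruction in the function implementation, lowering the
 * intrinsics above; all block metadata is invalidated if any instruction was
 * lowered.
 */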
static bool lower_impl(nir_function_impl *impl, void *layout)
{
   bool progress = false;
   nir_builder b;

   nir_builder_init(&b, impl);

   nir_foreach_block (block, impl) {
      nir_foreach_instr_safe (instr, block) {
         b.cursor = nir_before_instr(instr);
         switch (instr->type) {
         case nir_instr_type_intrinsic:
            progress |=
               lower_intrinsic(&b, nir_instr_as_intrinsic(instr), layout);
            break;

         default:
            break;
         }
      }
   }

   if (progress)
      nir_metadata_preserve(impl, nir_metadata_none);
   else
      nir_metadata_preserve(impl, nir_metadata_all);

   return progress;
}

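/* Pass entry point: lowers descriptor and UBO I/O intrinsics in every
 * function and runs DCE afterwards to clean up the now-dead instructions.
 */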
bool rogue_nir_lower_io(nir_shader *shader, void *layout)
{
   bool progress = false;

   nir_foreach_function (function, shader) {
      if (function->impl)
         progress |= lower_impl(function->impl, layout);
   }

   if (progress)
      nir_opt_dce(shader);

   return progress;
}