/*
 * Copyright © 2020 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

/*
 * Replaces the make-available/make-visible semantics on barriers with
 * ACCESS_COHERENT on the affected memory loads and stores.
 */

#include "nir/nir.h"
#include "shader_enums.h"

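/*
 * Classify a memory intrinsic: report which variable modes it touches and
 * whether it reads and/or writes memory. Returns false for intrinsics this
 * pass does not need to handle.
 */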
static bool
get_intrinsic_info(nir_intrinsic_instr *intrin, nir_variable_mode *modes,
                   bool *reads, bool *writes)
{
   switch (intrin->intrinsic) {
   case nir_intrinsic_image_deref_load:
   case nir_intrinsic_image_deref_sparse_load:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      break;
   case nir_intrinsic_image_deref_store:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *writes = true;
      break;
   case nir_intrinsic_image_deref_atomic_add:
   case nir_intrinsic_image_deref_atomic_fadd:
   case nir_intrinsic_image_deref_atomic_umin:
   case nir_intrinsic_image_deref_atomic_imin:
   case nir_intrinsic_image_deref_atomic_umax:
   case nir_intrinsic_image_deref_atomic_imax:
   case nir_intrinsic_image_deref_atomic_fmin:
   case nir_intrinsic_image_deref_atomic_fmax:
   case nir_intrinsic_image_deref_atomic_and:
   case nir_intrinsic_image_deref_atomic_or:
   case nir_intrinsic_image_deref_atomic_xor:
   case nir_intrinsic_image_deref_atomic_exchange:
   case nir_intrinsic_image_deref_atomic_comp_swap:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      *writes = true;
      break;
   case nir_intrinsic_load_ssbo:
      *modes = nir_var_mem_ssbo;
      *reads = true;
      break;
   case nir_intrinsic_store_ssbo:
      *modes = nir_var_mem_ssbo;
      *writes = true;
      break;
   case nir_intrinsic_ssbo_atomic_add:
   case nir_intrinsic_ssbo_atomic_imin:
   case nir_intrinsic_ssbo_atomic_umin:
   case nir_intrinsic_ssbo_atomic_imax:
   case nir_intrinsic_ssbo_atomic_umax:
   case nir_intrinsic_ssbo_atomic_and:
   case nir_intrinsic_ssbo_atomic_or:
   case nir_intrinsic_ssbo_atomic_xor:
   case nir_intrinsic_ssbo_atomic_exchange:
   case nir_intrinsic_ssbo_atomic_comp_swap:
   case nir_intrinsic_ssbo_atomic_fadd:
   case nir_intrinsic_ssbo_atomic_fcomp_swap:
   case nir_intrinsic_ssbo_atomic_fmax:
   case nir_intrinsic_ssbo_atomic_fmin:
      *modes = nir_var_mem_ssbo;
      *reads = true;
      *writes = true;
      break;
   case nir_intrinsic_load_global:
      *modes = nir_var_mem_global;
      *reads = true;
      break;
   case nir_intrinsic_store_global:
      *modes = nir_var_mem_global;
      *writes = true;
      break;
   case nir_intrinsic_global_atomic_add:
   case nir_intrinsic_global_atomic_imin:
   case nir_intrinsic_global_atomic_umin:
   case nir_intrinsic_global_atomic_imax:
   case nir_intrinsic_global_atomic_umax:
   case nir_intrinsic_global_atomic_and:
   case nir_intrinsic_global_atomic_or:
   case nir_intrinsic_global_atomic_xor:
   case nir_intrinsic_global_atomic_exchange:
   case nir_intrinsic_global_atomic_comp_swap:
   case nir_intrinsic_global_atomic_fadd:
   case nir_intrinsic_global_atomic_fcomp_swap:
   case nir_intrinsic_global_atomic_fmax:
   case nir_intrinsic_global_atomic_fmin:
      *modes = nir_var_mem_global;
      *reads = true;
      *writes = true;
      break;
   case nir_intrinsic_load_deref:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      break;
   case nir_intrinsic_store_deref:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *writes = true;
      break;
   case nir_intrinsic_deref_atomic_add:
   case nir_intrinsic_deref_atomic_imin:
   case nir_intrinsic_deref_atomic_umin:
   case nir_intrinsic_deref_atomic_imax:
   case nir_intrinsic_deref_atomic_umax:
   case nir_intrinsic_deref_atomic_and:
   case nir_intrinsic_deref_atomic_or:
   case nir_intrinsic_deref_atomic_xor:
   case nir_intrinsic_deref_atomic_exchange:
   case nir_intrinsic_deref_atomic_comp_swap:
   case nir_intrinsic_deref_atomic_fadd:
   case nir_intrinsic_deref_atomic_fmin:
   case nir_intrinsic_deref_atomic_fmax:
   case nir_intrinsic_deref_atomic_fcomp_swap:
      *modes = nir_src_as_deref(intrin->src[0])->modes;
      *reads = true;
      *writes = true;
      break;
   default:
      return false;
   }
   return true;
}

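/*
 * If the instruction is a barrier carrying the requested make-available or
 * make-visible semantic, accumulate its memory modes into *cur_modes and
 * strip that semantic from the barrier. Otherwise, if it is a matching
 * memory access in one of the accumulated modes, mark it ACCESS_COHERENT.
 * Returns true on any change.
 */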
static bool
visit_instr(nir_instr *instr, uint32_t *cur_modes, unsigned vis_avail_sem)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

   if (intrin->intrinsic == nir_intrinsic_scoped_barrier &&
       (nir_intrinsic_memory_semantics(intrin) & vis_avail_sem)) {
      *cur_modes |= nir_intrinsic_memory_modes(intrin);

      unsigned semantics = nir_intrinsic_memory_semantics(intrin);
      nir_intrinsic_set_memory_semantics(
         intrin, semantics & ~vis_avail_sem);
      return true;
   }

   if (!*cur_modes)
      return false; /* early exit */

   nir_variable_mode modes;
   bool reads = false, writes = false;
   if (!get_intrinsic_info(intrin, &modes, &reads, &writes))
      return false;

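   /* Visibility only applies to reads and availability only to writes. */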
   if (!reads && vis_avail_sem == NIR_MEMORY_MAKE_VISIBLE)
      return false;
   if (!writes && vis_avail_sem == NIR_MEMORY_MAKE_AVAILABLE)
      return false;

   if (!nir_intrinsic_has_access(intrin))
      return false;

   unsigned access = nir_intrinsic_access(intrin);

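   /* Skip accesses that are already coherent or otherwise exempt. */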
   if (access & (ACCESS_NON_READABLE | ACCESS_NON_WRITEABLE | ACCESS_CAN_REORDER | ACCESS_COHERENT))
      return false;

   if (*cur_modes & modes) {
      nir_intrinsic_set_access(intrin, access | ACCESS_COHERENT);
      return true;
   }

   return false;
}

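/*
 * Walks the control-flow tree forward, accumulating the modes made visible
 * by barriers and applying them to subsequent loads. Loop bodies are
 * revisited until no more progress is made, since a barrier late in one
 * iteration affects loads early in the next.
 */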
static bool
lower_make_visible(nir_cf_node *cf_node, uint32_t *cur_modes)
{
   bool progress = false;
   switch (cf_node->type) {
   case nir_cf_node_block: {
      nir_block *block = nir_cf_node_as_block(cf_node);
      nir_foreach_instr(instr, block)
         progress |= visit_instr(instr, cur_modes, NIR_MEMORY_MAKE_VISIBLE);
      break;
   }
   case nir_cf_node_if: {
      nir_if *nif = nir_cf_node_as_if(cf_node);
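      /* Process each branch with its own copy of the pending modes, then
       * merge, since either path may have executed. */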
      uint32_t cur_modes_then = *cur_modes;
      uint32_t cur_modes_else = *cur_modes;
      foreach_list_typed(nir_cf_node, if_node, node, &nif->then_list)
         progress |= lower_make_visible(if_node, &cur_modes_then);
      foreach_list_typed(nir_cf_node, if_node, node, &nif->else_list)
         progress |= lower_make_visible(if_node, &cur_modes_else);
      *cur_modes |= cur_modes_then | cur_modes_else;
      break;
   }
   case nir_cf_node_loop: {
      nir_loop *loop = nir_cf_node_as_loop(cf_node);
      bool loop_progress;
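      /* Iterate to a fixed point: modes from a barrier at the end of the
       * body must reach accesses at the start of the next iteration. */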
      do {
         loop_progress = false;
         foreach_list_typed(nir_cf_node, loop_node, node, &loop->body)
            loop_progress |= lower_make_visible(loop_node, cur_modes);
         progress |= loop_progress;
      } while (loop_progress);
      break;
   }
   case nir_cf_node_function:
      unreachable("Invalid cf type");
   }
   return progress;
}

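/*
 * Mirror image of lower_make_visible: walks the control-flow tree backward,
 * accumulating the modes made available by barriers and applying them to
 * the stores that precede them.
 */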
static bool
lower_make_available(nir_cf_node *cf_node, uint32_t *cur_modes)
{
   bool progress = false;
   switch (cf_node->type) {
   case nir_cf_node_block: {
      nir_block *block = nir_cf_node_as_block(cf_node);
      nir_foreach_instr_reverse(instr, block)
         progress |= visit_instr(instr, cur_modes, NIR_MEMORY_MAKE_AVAILABLE);
      break;
   }
   case nir_cf_node_if: {
      nir_if *nif = nir_cf_node_as_if(cf_node);
      uint32_t cur_modes_then = *cur_modes;
      uint32_t cur_modes_else = *cur_modes;
      foreach_list_typed_reverse(nir_cf_node, if_node, node, &nif->then_list)
         progress |= lower_make_available(if_node, &cur_modes_then);
      foreach_list_typed_reverse(nir_cf_node, if_node, node, &nif->else_list)
         progress |= lower_make_available(if_node, &cur_modes_else);
      *cur_modes |= cur_modes_then | cur_modes_else;
      break;
   }
   case nir_cf_node_loop: {
      nir_loop *loop = nir_cf_node_as_loop(cf_node);
      bool loop_progress;
      do {
         loop_progress = false;
         foreach_list_typed_reverse(nir_cf_node, loop_node, node, &loop->body)
            loop_progress |= lower_make_available(loop_node, cur_modes);
         progress |= loop_progress;
      } while (loop_progress);
      break;
   }
   case nir_cf_node_function:
      unreachable("Invalid cf type");
   }
   return progress;
}

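/*
 * Lowers make-available/make-visible barrier semantics to ACCESS_COHERENT
 * flags on the individual memory accesses they cover.
 */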
bool
nir_lower_memory_model(nir_shader *shader)
{
   bool progress = false;

   struct exec_list *cf_list = &nir_shader_get_entrypoint(shader)->body;

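   /* Forward pass: propagate make-visible semantics to loads. */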
   uint32_t modes = 0;
   foreach_list_typed(nir_cf_node, cf_node, node, cf_list)
      progress |= lower_make_visible(cf_node, &modes);

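   /* Backward pass: propagate make-available semantics to stores. */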
   modes = 0;
   foreach_list_typed_reverse(nir_cf_node, cf_node, node, cf_list)
      progress |= lower_make_available(cf_node, &modes);

   return progress;
}