1 /*
2 * Copyright © 2014 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "nir.h"
25 #include "nir_vla.h"
26 #include "nir_worklist.h"
27
28 /*
29 * Basic liveness analysis. This works only in SSA form.
30 *
31 * This liveness pass treats phi nodes as being melded to the space between
32 * blocks so that the destinations of a phi are in the livein of the block
33 * in which it resides and the sources are in the liveout of the
34 * corresponding block. By formulating the liveness information in this
35 * way, we ensure that the definition of any variable dominates its entire
36 * live range. This is true because the only way that the definition of an
37 * SSA value may not dominate a use is if the use is in a phi node and the
 * uses in phi nodes are in the live-out of the corresponding predecessor
39 * block but not in the live-in of the block containing the phi node.
40 */
41
/* Shared state for the liveness data-flow analysis. */
struct live_defs_state {
   /* Number of BITSET_WORDs needed for one bit per SSA def,
    * i.e. BITSET_WORDS(impl->ssa_alloc).
    */
   unsigned bitset_words;

   /* Used in propagate_across_edge() */
   BITSET_WORD *tmp_live;

   /* Blocks whose live sets still need (re)processing. */
   nir_block_worklist worklist;
};
50
51 /* Initialize the liveness data to zero and add the given block to the
52 * worklist.
53 */
54 static void
init_liveness_block(nir_block * block,struct live_defs_state * state)55 init_liveness_block(nir_block *block,
56 struct live_defs_state *state)
57 {
58 block->live_in = reralloc(block, block->live_in, BITSET_WORD,
59 state->bitset_words);
60 memset(block->live_in, 0, state->bitset_words * sizeof(BITSET_WORD));
61
62 block->live_out = reralloc(block, block->live_out, BITSET_WORD,
63 state->bitset_words);
64 memset(block->live_out, 0, state->bitset_words * sizeof(BITSET_WORD));
65
66 nir_block_worklist_push_head(&state->worklist, block);
67 }
68
69 static bool
set_src_live(nir_src * src,void * void_live)70 set_src_live(nir_src *src, void *void_live)
71 {
72 BITSET_WORD *live = void_live;
73
74 if (nir_src_is_undef(*src))
75 return true; /* undefined variables are never live */
76
77 BITSET_SET(live, src->ssa->index);
78
79 return true;
80 }
81
82 static bool
set_ssa_def_dead(nir_def * def,void * void_live)83 set_ssa_def_dead(nir_def *def, void *void_live)
84 {
85 BITSET_WORD *live = void_live;
86
87 BITSET_CLEAR(live, def->index);
88
89 return true;
90 }
91
92 /** Propagates the live in of succ across the edge to the live out of pred
93 *
94 * Phi nodes exist "between" blocks and all the phi nodes at the start of a
95 * block act "in parallel". When we propagate from the live_in of one
96 * block to the live out of the other, we have to kill any writes from phis
97 * and make live any sources.
98 *
99 * Returns true if updating live out of pred added anything
100 */
101 static bool
propagate_across_edge(nir_block * pred,nir_block * succ,struct live_defs_state * state)102 propagate_across_edge(nir_block *pred, nir_block *succ,
103 struct live_defs_state *state)
104 {
105 BITSET_WORD *live = state->tmp_live;
106 memcpy(live, succ->live_in, state->bitset_words * sizeof *live);
107
108 nir_foreach_phi(phi, succ) {
109 set_ssa_def_dead(&phi->def, live);
110 }
111
112 nir_foreach_phi(phi, succ) {
113 nir_foreach_phi_src(src, phi) {
114 if (src->pred == pred) {
115 set_src_live(&src->src, live);
116 break;
117 }
118 }
119 }
120
121 BITSET_WORD progress = 0;
122 for (unsigned i = 0; i < state->bitset_words; ++i) {
123 progress |= live[i] & ~pred->live_out[i];
124 pred->live_out[i] |= live[i];
125 }
126 return progress != 0;
127 }
128
129 void
nir_live_defs_impl(nir_function_impl * impl)130 nir_live_defs_impl(nir_function_impl *impl)
131 {
132 struct live_defs_state state = {
133 .bitset_words = BITSET_WORDS(impl->ssa_alloc),
134 };
135 state.tmp_live = rzalloc_array(impl, BITSET_WORD, state.bitset_words),
136
137 nir_block_worklist_init(&state.worklist, impl->num_blocks, NULL);
138
139 /* Allocate live_in and live_out sets and add all of the blocks to the
140 * worklist.
141 */
142 nir_foreach_block(block, impl) {
143 init_liveness_block(block, &state);
144 }
145
146 /* We're now ready to work through the worklist and update the liveness
147 * sets of each of the blocks. By the time we get to this point, every
148 * block in the function implementation has been pushed onto the
149 * worklist in reverse order. As long as we keep the worklist
150 * up-to-date as we go, everything will get covered.
151 */
152 while (!nir_block_worklist_is_empty(&state.worklist)) {
153 /* We pop them off in the reverse order we pushed them on. This way
154 * the first walk of the instructions is backwards so we only walk
155 * once in the case of no control flow.
156 */
157 nir_block *block = nir_block_worklist_pop_head(&state.worklist);
158
159 memcpy(block->live_in, block->live_out,
160 state.bitset_words * sizeof(BITSET_WORD));
161
162 nir_if *following_if = nir_block_get_following_if(block);
163 if (following_if)
164 set_src_live(&following_if->condition, block->live_in);
165
166 nir_foreach_instr_reverse(instr, block) {
167 /* Phi nodes are handled seperately so we want to skip them. Since
168 * we are going backwards and they are at the beginning, we can just
169 * break as soon as we see one.
170 */
171 if (instr->type == nir_instr_type_phi)
172 break;
173
174 nir_foreach_def(instr, set_ssa_def_dead, block->live_in);
175 nir_foreach_src(instr, set_src_live, block->live_in);
176 }
177
178 /* Walk over all of the predecessors of the current block updating
179 * their live in with the live out of this one. If anything has
180 * changed, add the predecessor to the work list so that we ensure
181 * that the new information is used.
182 */
183 set_foreach(block->predecessors, entry) {
184 nir_block *pred = (nir_block *)entry->key;
185 if (propagate_across_edge(pred, block, &state))
186 nir_block_worklist_push_tail(&state.worklist, pred);
187 }
188 }
189
190 ralloc_free(state.tmp_live);
191 nir_block_worklist_fini(&state.worklist);
192 }
193
194 /** Return the live set at a cursor
195 *
196 * Note: The bitset returned may be the live_in or live_out from the block in
197 * which the instruction lives. Do not ralloc_free() it directly;
198 * instead, provide a mem_ctx and free that.
199 */
200 const BITSET_WORD *
nir_get_live_defs(nir_cursor cursor,void * mem_ctx)201 nir_get_live_defs(nir_cursor cursor, void *mem_ctx)
202 {
203 nir_block *block = nir_cursor_current_block(cursor);
204 nir_function_impl *impl = nir_cf_node_get_function(&block->cf_node);
205 assert(impl->valid_metadata & nir_metadata_live_defs);
206
207 switch (cursor.option) {
208 case nir_cursor_before_block:
209 return cursor.block->live_in;
210
211 case nir_cursor_after_block:
212 return cursor.block->live_out;
213
214 case nir_cursor_before_instr:
215 if (cursor.instr == nir_block_first_instr(cursor.instr->block))
216 return cursor.instr->block->live_in;
217 break;
218
219 case nir_cursor_after_instr:
220 if (cursor.instr == nir_block_last_instr(cursor.instr->block))
221 return cursor.instr->block->live_out;
222 break;
223 }
224
225 /* If we got here, we're an instruction cursor mid-block */
226 const unsigned bitset_words = BITSET_WORDS(impl->ssa_alloc);
227 BITSET_WORD *live = ralloc_array(mem_ctx, BITSET_WORD, bitset_words);
228 memcpy(live, block->live_out, bitset_words * sizeof(BITSET_WORD));
229
230 nir_foreach_instr_reverse(instr, block) {
231 if (cursor.option == nir_cursor_after_instr && instr == cursor.instr)
232 break;
233
234 /* If someone asked for liveness in the middle of a bunch of phis,
235 * that's an error. Since we are going backwards and they are at the
236 * beginning, we can just blow up as soon as we see one.
237 */
238 assert(instr->type != nir_instr_type_phi);
239 if (instr->type == nir_instr_type_phi)
240 break;
241
242 nir_foreach_def(instr, set_ssa_def_dead, live);
243 nir_foreach_src(instr, set_src_live, live);
244
245 if (cursor.option == nir_cursor_before_instr && instr == cursor.instr)
246 break;
247 }
248
249 return live;
250 }
251
252 static bool
src_does_not_use_def(nir_src * src,void * def)253 src_does_not_use_def(nir_src *src, void *def)
254 {
255 return src->ssa != (nir_def *)def;
256 }
257
258 static bool
search_for_use_after_instr(nir_instr * start,nir_def * def)259 search_for_use_after_instr(nir_instr *start, nir_def *def)
260 {
261 /* Only look for a use strictly after the given instruction */
262 struct exec_node *node = start->node.next;
263 while (!exec_node_is_tail_sentinel(node)) {
264 nir_instr *instr = exec_node_data(nir_instr, node, node);
265 if (!nir_foreach_src(instr, src_does_not_use_def, def))
266 return true;
267 node = node->next;
268 }
269
270 /* If uses are considered to be in the block immediately preceding the if
271 * so we need to also check the following if condition, if any.
272 */
273 nir_if *following_if = nir_block_get_following_if(start->block);
274 if (following_if && following_if->condition.ssa == def)
275 return true;
276
277 return false;
278 }
279
280 /* Returns true if def is live at instr assuming that def comes before
281 * instr in a pre DFS search of the dominance tree.
282 */
283 static bool
nir_def_is_live_at(nir_def * def,nir_instr * instr)284 nir_def_is_live_at(nir_def *def, nir_instr *instr)
285 {
286 if (BITSET_TEST(instr->block->live_out, def->index)) {
287 /* Since def dominates instr, if def is in the liveout of the block,
288 * it's live at instr
289 */
290 return true;
291 } else {
292 if (BITSET_TEST(instr->block->live_in, def->index) ||
293 def->parent_instr->block == instr->block) {
294 /* In this case it is either live coming into instr's block or it
295 * is defined in the same block. In this case, we simply need to
296 * see if it is used after instr.
297 */
298 return search_for_use_after_instr(instr, def);
299 } else {
300 return false;
301 }
302 }
303 }
304
305 bool
nir_defs_interfere(nir_def * a,nir_def * b)306 nir_defs_interfere(nir_def *a, nir_def *b)
307 {
308 if (a->parent_instr == b->parent_instr) {
309 /* Two variables defined at the same time interfere assuming at
310 * least one isn't dead.
311 */
312 return true;
313 } else if (a->parent_instr->type == nir_instr_type_undef ||
314 b->parent_instr->type == nir_instr_type_undef) {
315 /* If either variable is an ssa_undef, then there's no interference */
316 return false;
317 } else if (a->parent_instr->index < b->parent_instr->index) {
318 return nir_def_is_live_at(a, b->parent_instr);
319 } else {
320 return nir_def_is_live_at(b, a->parent_instr);
321 }
322 }
323