/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#include "nir.h"
#include "compiler/nir_types.h"

/*
 * Lowers all copy intrinsics to sequences of load/store intrinsics.
 */

/* Walks down the deref chain and returns the next deref in the chain whose
 * child is a wildcard.  In other words, given the chain a[1].foo[*].bar,
 * this function will return the deref to foo.  Calling it a second time
 * on the remaining [*].bar chain returns NULL.
 */
static nir_deref *
deref_next_wildcard_parent(nir_deref *deref)
{
   for (nir_deref *tail = deref; tail->child; tail = tail->child) {
      if (tail->child->deref_type != nir_deref_type_array)
         continue;

      nir_deref_array *arr = nir_deref_as_array(tail->child);

      if (arr->deref_array_type == nir_deref_array_type_wildcard)
         return tail;
   }

   return NULL;
}

/* This function recursively walks the given deref chains and replaces the
 * given copy instruction with an equivalent sequence of load/store
 * operations.
 *
 * @copy_instr    The copy instruction to replace; new instructions will be
 *                inserted before this one
 *
 * @dest_head     The head of the destination variable deref chain
 *
 * @src_head      The head of the source variable deref chain
 *
 * @dest_tail     The current tail of the destination variable deref chain;
 *                this is used for recursion, so external callers of this
 *                function should call it with tail == head
 *
 * @src_tail      The current tail of the source variable deref chain;
 *                this is used for recursion, so external callers of this
 *                function should call it with tail == head
 *
 * @shader        The shader in which the new instructions are allocated
 */
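/* As an illustration (the variable names here are hypothetical, not taken
 * from any particular shader): lowering a copy between two vec4[3]
 * variables, whose source and destination chains are b[*] and a[*],
 * temporarily turns the matched wildcard pair into the direct indices
 * 0, 1, and 2 and recurses once per index, so the copy ultimately becomes
 * three load_var/store_var pairs, roughly
 *
 *    a[0] = load b[0];   a[1] = load b[1];   a[2] = load b[2];
 *
 * each pair being inserted immediately before the original copy_var.
 */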
static void
emit_copy_load_store(nir_intrinsic_instr *copy_instr,
                     nir_deref_var *dest_head, nir_deref_var *src_head,
                     nir_deref *dest_tail, nir_deref *src_tail,
                     nir_shader *shader)
{
   /* Find the next pair of wildcards */
   nir_deref *src_arr_parent = deref_next_wildcard_parent(src_tail);
   nir_deref *dest_arr_parent = deref_next_wildcard_parent(dest_tail);

   if (src_arr_parent || dest_arr_parent) {
      /* Wildcards had better come in matched pairs */
      assert(src_arr_parent && dest_arr_parent);

      nir_deref_array *src_arr = nir_deref_as_array(src_arr_parent->child);
      nir_deref_array *dest_arr = nir_deref_as_array(dest_arr_parent->child);

      unsigned length = glsl_get_length(src_arr_parent->type);
      /* The wildcards should represent the same number of elements */
      assert(length == glsl_get_length(dest_arr_parent->type));
      assert(length > 0);

      /* Walk over all of the elements that this wildcard refers to and
       * call emit_copy_load_store on each one of them */
      src_arr->deref_array_type = nir_deref_array_type_direct;
      dest_arr->deref_array_type = nir_deref_array_type_direct;
      for (unsigned i = 0; i < length; i++) {
         src_arr->base_offset = i;
         dest_arr->base_offset = i;
         emit_copy_load_store(copy_instr, dest_head, src_head,
                              &dest_arr->deref, &src_arr->deref, shader);
      }
      src_arr->deref_array_type = nir_deref_array_type_wildcard;
      dest_arr->deref_array_type = nir_deref_array_type_wildcard;
   } else {
      /* In this case, we have no wildcards left, so all we have to do is
       * emit the load and store operations. */
      src_tail = nir_deref_tail(src_tail);
      dest_tail = nir_deref_tail(dest_tail);

      assert(src_tail->type == dest_tail->type);

      unsigned num_components = glsl_get_vector_elements(src_tail->type);
      unsigned bit_size = glsl_get_bit_size(src_tail->type);

      nir_intrinsic_instr *load =
         nir_intrinsic_instr_create(shader, nir_intrinsic_load_var);
      load->num_components = num_components;
      load->variables[0] = nir_deref_var_clone(src_head, load);
      nir_ssa_dest_init(&load->instr, &load->dest, num_components, bit_size,
                        NULL);

      nir_instr_insert_before(&copy_instr->instr, &load->instr);

      nir_intrinsic_instr *store =
         nir_intrinsic_instr_create(shader, nir_intrinsic_store_var);
      store->num_components = num_components;
      nir_intrinsic_set_write_mask(store, (1 << num_components) - 1);
      store->variables[0] = nir_deref_var_clone(dest_head, store);

      store->src[0].is_ssa = true;
      store->src[0].ssa = &load->dest.ssa;

      nir_instr_insert_before(&copy_instr->instr, &store->instr);
   }
}
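/* A rough sketch of the non-wildcard leaf case above for a single vec2
 * copy (pseudo-notation, not the exact nir_print syntax): the pass emits
 *
 *    ssa_tmp = load_var (src)              <- 2 components
 *              store_var (dest, ssa_tmp)   <- write mask 0x3
 *
 * immediately before the copy_var, with the store consuming the load's
 * SSA destination directly, so no extra temporary variable is created.
 */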

/* Lowers a copy instruction to a sequence of load/store instructions
 *
 * The new instructions are placed before the copy instruction in the IR.
 */
void
nir_lower_var_copy_instr(nir_intrinsic_instr *copy, nir_shader *shader)
{
   assert(copy->intrinsic == nir_intrinsic_copy_var);
   emit_copy_load_store(copy, copy->variables[0], copy->variables[1],
                        &copy->variables[0]->deref,
                        &copy->variables[1]->deref, shader);
}

static void
lower_var_copies_impl(nir_function_impl *impl)
{
   nir_shader *shader = impl->function->shader;

   nir_foreach_block(block, impl) {
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *copy = nir_instr_as_intrinsic(instr);
         if (copy->intrinsic != nir_intrinsic_copy_var)
            continue;

         nir_lower_var_copy_instr(copy, shader);

         nir_instr_remove(&copy->instr);
         ralloc_free(copy);
      }
   }
}

/* Lowers every copy_var instruction in the program to a sequence of
 * load/store instructions.
 */
void
nir_lower_var_copies(nir_shader *shader)
{
   nir_foreach_function(function, shader) {
      if (function->impl)
         lower_var_copies_impl(function->impl);
   }
}
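
/* Usage sketch (hedged; exactly where a driver runs this pass is up to that
 * driver): the pass is typically invoked once per shader, e.g.
 *
 *    nir_lower_var_copies(shader);
 *
 * after which no nir_intrinsic_copy_var intrinsics remain in any function
 * that has an implementation.
 */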