/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */
27
#include "nir.h"
#include "nir_builder.h"
#include "nir_deref.h"
#include "compiler/nir_types.h"
32
/*
 * Lowers all copy intrinsics to sequences of load/store intrinsics.
 */

37 static nir_deref_instr *
build_deref_to_next_wildcard(nir_builder * b,nir_deref_instr * parent,nir_deref_instr *** deref_arr)38 build_deref_to_next_wildcard(nir_builder *b,
39 nir_deref_instr *parent,
40 nir_deref_instr ***deref_arr)
41 {
42 for (; **deref_arr; (*deref_arr)++) {
43 if ((**deref_arr)->deref_type == nir_deref_type_array_wildcard)
44 return parent;
45
46 parent = nir_build_deref_follower(b, parent, **deref_arr);
47 }
48
49 assert(**deref_arr == NULL);
50 *deref_arr = NULL;
51 return parent;
52 }
53
54 static void
emit_deref_copy_load_store(nir_builder * b,nir_deref_instr * dst_deref,nir_deref_instr ** dst_deref_arr,nir_deref_instr * src_deref,nir_deref_instr ** src_deref_arr,enum gl_access_qualifier dst_access,enum gl_access_qualifier src_access)55 emit_deref_copy_load_store(nir_builder *b,
56 nir_deref_instr *dst_deref,
57 nir_deref_instr **dst_deref_arr,
58 nir_deref_instr *src_deref,
59 nir_deref_instr **src_deref_arr,
60 enum gl_access_qualifier dst_access,
61 enum gl_access_qualifier src_access)
62 {
63 if (dst_deref_arr || src_deref_arr) {
64 assert(dst_deref_arr && src_deref_arr);
65 dst_deref = build_deref_to_next_wildcard(b, dst_deref, &dst_deref_arr);
66 src_deref = build_deref_to_next_wildcard(b, src_deref, &src_deref_arr);
67 }
68
69 if (dst_deref_arr || src_deref_arr) {
70 assert(dst_deref_arr && src_deref_arr);
71 assert((*dst_deref_arr)->deref_type == nir_deref_type_array_wildcard);
72 assert((*src_deref_arr)->deref_type == nir_deref_type_array_wildcard);
73
74 unsigned length = glsl_get_length(src_deref->type);
75 /* The wildcards should represent the same number of elements */
76 assert(length == glsl_get_length(dst_deref->type));
77 assert(length > 0);
78
79 for (unsigned i = 0; i < length; i++) {
80 emit_deref_copy_load_store(b,
81 nir_build_deref_array_imm(b, dst_deref, i),
82 dst_deref_arr + 1,
83 nir_build_deref_array_imm(b, src_deref, i),
84 src_deref_arr + 1, dst_access, src_access);
85 }
86 } else {
87 assert(glsl_get_bare_type(dst_deref->type) ==
88 glsl_get_bare_type(src_deref->type));
89 assert(glsl_type_is_vector_or_scalar(dst_deref->type));
90
91 nir_store_deref_with_access(b, dst_deref,
92 nir_load_deref_with_access(b, src_deref, src_access),
93 ~0, src_access);
94 }
95 }
96
97 void
nir_lower_deref_copy_instr(nir_builder * b,nir_intrinsic_instr * copy)98 nir_lower_deref_copy_instr(nir_builder *b, nir_intrinsic_instr *copy)
99 {
100 /* Unfortunately, there's just no good way to handle wildcards except to
101 * flip the chain around and walk the list from variable to final pointer.
102 */
103 assert(copy->src[0].is_ssa && copy->src[1].is_ssa);
104 nir_deref_instr *dst = nir_instr_as_deref(copy->src[0].ssa->parent_instr);
105 nir_deref_instr *src = nir_instr_as_deref(copy->src[1].ssa->parent_instr);
106
107 nir_deref_path dst_path, src_path;
108 nir_deref_path_init(&dst_path, dst, NULL);
109 nir_deref_path_init(&src_path, src, NULL);
110
111 b->cursor = nir_before_instr(©->instr);
112 emit_deref_copy_load_store(b, dst_path.path[0], &dst_path.path[1],
113 src_path.path[0], &src_path.path[1],
114 nir_intrinsic_dst_access(copy),
115 nir_intrinsic_src_access(copy));
116
117 nir_deref_path_finish(&dst_path);
118 nir_deref_path_finish(&src_path);
119 }
120
121 static bool
lower_var_copies_impl(nir_function_impl * impl)122 lower_var_copies_impl(nir_function_impl *impl)
123 {
124 bool progress = false;
125
126 nir_builder b;
127 nir_builder_init(&b, impl);
128
129 nir_foreach_block(block, impl) {
130 nir_foreach_instr_safe(instr, block) {
131 if (instr->type != nir_instr_type_intrinsic)
132 continue;
133
134 nir_intrinsic_instr *copy = nir_instr_as_intrinsic(instr);
135 if (copy->intrinsic != nir_intrinsic_copy_deref)
136 continue;
137
138 nir_lower_deref_copy_instr(&b, copy);
139
140 nir_instr_remove(©->instr);
141 nir_deref_instr_remove_if_unused(nir_src_as_deref(copy->src[0]));
142 nir_deref_instr_remove_if_unused(nir_src_as_deref(copy->src[1]));
143
144 progress = true;
145 nir_instr_free(©->instr);
146 }
147 }
148
149 if (progress) {
150 nir_metadata_preserve(impl, nir_metadata_block_index |
151 nir_metadata_dominance);
152 } else {
153 nir_metadata_preserve(impl, nir_metadata_all);
154 }
155
156 return progress;
157 }
158
159 /* Lowers every copy_var instruction in the program to a sequence of
160 * load/store instructions.
161 */
162 bool
nir_lower_var_copies(nir_shader * shader)163 nir_lower_var_copies(nir_shader *shader)
164 {
165 bool progress = false;
166
167 nir_foreach_function(function, shader) {
168 if (function->impl)
169 progress |= lower_var_copies_impl(function->impl);
170 }
171
172 return progress;
173 }
174