/*
 * Copyright © 2020 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

#include "nir.h"
#include "nir_builder.h"

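/* Per-instruction callback: rewrite discard -> demote, discard_if ->
 * demote_if and load_helper_invocation -> is_helper_invocation, so that
 * killed pixels stay around as helper invocations and derivatives after
 * kill remain correct. */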
static bool
nir_lower_discard_to_demote_instr(nir_builder *b, nir_instr *instr, void *data)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;

   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   switch (intrin->intrinsic) {
   case nir_intrinsic_discard:
      intrin->intrinsic = nir_intrinsic_demote;
      return true;
   case nir_intrinsic_discard_if:
      intrin->intrinsic = nir_intrinsic_demote_if;
      return true;
   case nir_intrinsic_load_helper_invocation:
      intrin->intrinsic = nir_intrinsic_is_helper_invocation;
      return true;
   default:
      return false;
   }
}

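/* Per-instruction callback: rewrite demote -> discard and demote_if ->
 * discard_if. Helper-invocation queries are folded to false, since this
 * path is only taken when the shader needs no helper invocations. */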
static bool
nir_lower_demote_to_discard_instr(nir_builder *b, nir_instr *instr, void *data)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;

   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   switch (intrin->intrinsic) {
   case nir_intrinsic_demote:
      intrin->intrinsic = nir_intrinsic_discard;
      return true;
   case nir_intrinsic_demote_if:
      intrin->intrinsic = nir_intrinsic_discard_if;
      return true;
   case nir_intrinsic_is_helper_invocation:
   case nir_intrinsic_load_helper_invocation: {
      /* If the shader doesn't need helper invocations,
       * we can assume there are none. */
      b->cursor = nir_before_instr(instr);
      nir_ssa_def *zero = nir_imm_false(b);
      nir_ssa_def_rewrite_uses(&intrin->dest.ssa, zero);
      nir_instr_remove_v(instr);
      return true;
   }
   default:
      return false;
   }
}

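/* Emit an is_helper_invocation read in the last top-level block that
 * precedes 'instr' (or directly before 'instr' if it already sits in a
 * top-level block), so the value stays valid w.r.t. loops. */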
static nir_ssa_def *
insert_is_helper(nir_builder *b, nir_instr *instr)
{
   /* find best place to insert is_helper */
   nir_cf_node *node = &instr->block->cf_node;
   while (node->parent->type != nir_cf_node_function)
      node = nir_cf_node_prev(node->parent);
   nir_block *block = nir_cf_node_as_block(node);
   if (block == instr->block) {
      b->cursor = nir_before_instr(instr);
   } else {
      b->cursor = nir_after_block_before_jump(block);
   }
   return nir_is_helper_invocation(b, 1);
}


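/* Per-instruction callback: replace load_helper_invocation with an
 * is_helper_invocation value captured before the first demote. 'data'
 * points to the cached nir_ssa_def shared across the whole pass. */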
static bool
nir_lower_load_helper_to_is_helper(nir_builder *b, nir_instr *instr, void *data)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;

   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *is_helper = *(nir_ssa_def **)data;
   switch (intrin->intrinsic) {
   case nir_intrinsic_demote:
   case nir_intrinsic_demote_if:
      /* Insert is_helper at the last possible top-level point, i.e. just
       * before the first demote. */
      if (is_helper == NULL) {
         is_helper = insert_is_helper(b, instr);
         *(nir_ssa_def **)data = is_helper;
         return true;
      } else {
         return false;
      }
   case nir_intrinsic_load_helper_invocation:
      /* Don't update data: as long as we haven't encountered any demote(),
       * we can insert new is_helper() intrinsics. These are placed at
       * top-level blocks to ensure correct behavior w.r.t. loops. */
      if (is_helper == NULL)
         is_helper = insert_is_helper(b, instr);
      nir_ssa_def_rewrite_uses(&intrin->dest.ssa, is_helper);
      nir_instr_remove_v(instr);
      return true;
   default:
      return false;
   }
}

/**
 * Optimize discard and demote opcodes.
 *
 * If force_correct_quad_ops_after_discard is true and quad operations are
 * used, discard() will be converted to demote() and gl_HelperInvocation will
 * be lowered to helperInvocationEXT(). This is intended as a workaround for
 * game bugs to force correct derivatives after kill. This lowering is not
 * valid in the general case as it might change the result of subgroup
 * operations and loop behavior.
 *
 * Otherwise, if demote is used and no ops need helper invocations, demote()
 * will be converted to discard() as an optimization.
 */
bool
nir_lower_discard_or_demote(nir_shader *shader,
                            bool force_correct_quad_ops_after_discard)
{
   if (shader->info.stage != MESA_SHADER_FRAGMENT)
      return false;

   /* We need uses_discard/demote and needs_*_helper_invocations. */
   nir_shader_gather_info(shader, nir_shader_get_entrypoint(shader));
   /* Validate that if uses_demote is set, uses_discard is also set. */
   assert(!shader->info.fs.uses_demote || shader->info.fs.uses_discard);

   /* Quick skip. */
   if (!shader->info.fs.uses_discard)
      return false;

   bool progress = false;

   if (force_correct_quad_ops_after_discard &&
       shader->info.fs.needs_quad_helper_invocations) {
      /* If we need correct derivatives, convert discard to demote only when
       * derivatives are actually used.
       */
      progress = nir_shader_instructions_pass(shader,
                                              nir_lower_discard_to_demote_instr,
                                              nir_metadata_block_index |
                                              nir_metadata_dominance |
                                              nir_metadata_live_ssa_defs |
                                              nir_metadata_instr_index,
                                              NULL);
      shader->info.fs.uses_demote = true;
   } else if (!shader->info.fs.needs_quad_helper_invocations &&
              !shader->info.fs.needs_all_helper_invocations &&
              shader->info.fs.uses_demote) {
      /* If we don't need any helper invocations, convert demote to discard. */
      progress = nir_shader_instructions_pass(shader,
                                              nir_lower_demote_to_discard_instr,
                                              nir_metadata_block_index |
                                              nir_metadata_dominance,
                                              NULL);
      shader->info.fs.uses_demote = false;
   } else if (shader->info.fs.uses_demote &&
              BITSET_TEST(shader->info.system_values_read,
                          nir_system_value_from_intrinsic(nir_intrinsic_load_helper_invocation))) {
      /* load_helper needs to preserve the value (whether an invocation is
       * a helper lane) from the beginning of the shader. */
      nir_ssa_def *is_helper = NULL;
      progress = nir_shader_instructions_pass(shader,
                                              nir_lower_load_helper_to_is_helper,
                                              nir_metadata_block_index |
                                              nir_metadata_dominance,
                                              &is_helper);
      BITSET_CLEAR(shader->info.system_values_read,
                   nir_system_value_from_intrinsic(nir_intrinsic_load_helper_invocation));
   }

   /* Validate again that if uses_demote is set, uses_discard is also set. */
   assert(!shader->info.fs.uses_demote || shader->info.fs.uses_discard);
   return progress;
}
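
/* Illustrative sketch (compiled out, not part of the pass): a driver's NIR
 * optimization loop might invoke this lowering roughly as follows. The
 * function and flag names below are hypothetical; only
 * nir_lower_discard_or_demote() and nir_opt_dce() are real NIR entry points. */
#if 0
static void
example_lower_discard(nir_shader *nir, bool broken_quad_ops_after_discard)
{
   /* Pick demote vs. discard depending on whether the driver/game needs
    * correct derivatives after kill, then clean up dead code if the pass
    * made progress. */
   if (nir_lower_discard_or_demote(nir, broken_quad_ops_after_discard))
      nir_opt_dce(nir);
}
#endif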