• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright © 2020 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
23 
24 #include "anv_nir.h"
25 #include "nir_builder.h"
26 
/* Lower anv's load_global_constant_offset/_bounded intrinsics (emitted for
 * UBO access) into plain NIR global-constant loads.
 *
 * Returns true if the instruction was rewritten, false if it was not one of
 * the two intrinsics this pass handles.  Runs as a nir_shader_intrinsics_pass
 * callback, so b's cursor is positioned here before emitting replacement code.
 */
static bool
lower_ubo_load_instr(nir_builder *b, nir_intrinsic_instr *load,
                     UNUSED void *_data)
{
   if (load->intrinsic != nir_intrinsic_load_global_constant_offset &&
       load->intrinsic != nir_intrinsic_load_global_constant_bounded)
      return false;

   b->cursor = nir_before_instr(&load->instr);

   /* src[0] = 64-bit base address, src[1] = byte offset; the bounded variant
    * additionally carries src[2] = buffer size, and out-of-bounds reads must
    * return zero.
    */
   nir_def *base_addr = load->src[0].ssa;
   nir_def *bound = NULL;
   if (load->intrinsic == nir_intrinsic_load_global_constant_bounded)
      bound = load->src[2].ssa;

   unsigned bit_size = load->def.bit_size;
   assert(bit_size >= 8 && bit_size % 8 == 0);
   unsigned byte_size = bit_size / 8;

   nir_def *val;
   if (!nir_src_is_divergent(&load->src[0]) && nir_src_is_const(load->src[1])) {
      /* Uniform address + constant offset: use the block-load intrinsic,
       * which fetches a whole 64B cacheline uniformly across the subgroup.
       */
      uint32_t offset = nir_src_as_uint(load->src[1]);

      /* Things should be component-aligned. */
      assert(offset % byte_size == 0);

      /* The block loads below assume 64B-aligned UBO bindings. */
      assert(ANV_UBO_ALIGNMENT == 64);

      unsigned suboffset = offset % 64;
      uint64_t aligned_offset = offset - suboffset;

      /* Load two just in case we go over a 64B boundary */
      nir_def *data[2];
      for (unsigned i = 0; i < 2; i++) {
         nir_def *addr = nir_iadd_imm(b, base_addr, aligned_offset + i * 64);

         /* 16 x 32-bit components = one 64B block per load. */
         data[i] = nir_load_global_constant_uniform_block_intel(
            b, 16, 32, addr,
            .access = nir_intrinsic_access(load),
            .align_mul = 64);
         if (bound) {
            /* Zero the whole block unless its last byte
             * (aligned_offset + i * 64 + 63) is within the bound.
             */
            data[i] = nir_bcsel(b,
                                nir_igt_imm(b, bound, aligned_offset + i * 64 + 63),
                                data[i],
                                nir_imm_int(b, 0));
         }
      }

      /* Carve the requested components back out of the (up to) 128B span. */
      val = nir_extract_bits(b, data, 2, suboffset * 8,
                             load->num_components, bit_size);
   } else {
      /* General case: compute a 64-bit address and do a regular load. */
      nir_def *offset = load->src[1].ssa;
      nir_def *addr = nir_iadd(b, base_addr, nir_u2u64(b, offset));

      if (bound) {
         /* Bounded variant: guard the load with an if and phi in zero on the
          * out-of-bounds path, so no OOB address is ever dereferenced.
          */
         nir_def *zero = nir_imm_zero(b, load->num_components, bit_size);

         unsigned load_size = byte_size * load->num_components;
         /* In bounds iff the last byte of the load (offset + load_size - 1)
          * is below bound.
          */
         nir_def *in_bounds =
            nir_ilt(b, nir_iadd_imm(b, offset, load_size - 1), bound);

         nir_push_if(b, in_bounds);

         nir_def *load_val =
            nir_build_load_global_constant(b, load->def.num_components,
                                           load->def.bit_size, addr,
                                           .access = nir_intrinsic_access(load),
                                           .align_mul = nir_intrinsic_align_mul(load),
                                           .align_offset = nir_intrinsic_align_offset(load));

         nir_pop_if(b, NULL);

         val = nir_if_phi(b, load_val, zero);
      } else {
         val = nir_build_load_global_constant(b, load->def.num_components,
                                              load->def.bit_size, addr,
                                              .access = nir_intrinsic_access(load),
                                              .align_mul = nir_intrinsic_align_mul(load),
                                              .align_offset = nir_intrinsic_align_offset(load));
      }
   }

   /* Rewrite all uses of the original destination and delete the old instr. */
   nir_def_replace(&load->def, val);

   return true;
}
113 
114 bool
anv_nir_lower_ubo_loads(nir_shader * shader)115 anv_nir_lower_ubo_loads(nir_shader *shader)
116 {
117    return nir_shader_intrinsics_pass(shader, lower_ubo_load_instr,
118                                        nir_metadata_none,
119                                        NULL);
120 }
121