/*
 * Copyright 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 */
#ifndef AC_LLVM_BUILD_H
#define AC_LLVM_BUILD_H

#include "ac_llvm_util.h"
#include "ac_shader_abi.h"
#include "ac_shader_args.h"
#include "ac_shader_util.h"
#include "amd_family.h"
#include "compiler/nir/nir.h"
#include <llvm-c/Core.h>

#include <stdbool.h>

#ifdef __cplusplus
extern "C" {
#endif

enum
{
   AC_ADDR_SPACE_FLAT = 0,        /* Slower than global. */
   AC_ADDR_SPACE_GLOBAL = 1,
   AC_ADDR_SPACE_GDS = 2,
   AC_ADDR_SPACE_LDS = 3,
   AC_ADDR_SPACE_CONST = 4,       /* Global allowing SMEM. */
   AC_ADDR_SPACE_CONST_32BIT = 6, /* Same as CONST, but the pointer type has 32 bits. */
};

#define AC_WAIT_LGKM   (1 << 0) /* LDS, GDS, constant, message */
#define AC_WAIT_VLOAD  (1 << 1) /* VMEM load/sample instructions */
#define AC_WAIT_VSTORE (1 << 2) /* VMEM store instructions */
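/*
 * Illustrative sketch, not part of the API: the AC_WAIT_* bits are intended
 * to be OR'ed together and passed to ac_build_waitcnt() (declared below),
 * e.g. to wait for outstanding LDS and VMEM loads before a barrier:
 *
 *    ac_build_waitcnt(ctx, AC_WAIT_LGKM | AC_WAIT_VLOAD);
 *    ac_build_s_barrier(ctx);
 */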

struct ac_llvm_flow;
struct ac_llvm_compiler;

struct ac_llvm_flow_state {
   struct ac_llvm_flow *stack;
   unsigned depth_max;
   unsigned depth;
};

struct ac_llvm_context {
   LLVMContextRef context;
   LLVMModuleRef module;
   LLVMBuilderRef builder;

   LLVMValueRef main_function;

   LLVMTypeRef voidt;
   LLVMTypeRef i1;
   LLVMTypeRef i8;
   LLVMTypeRef i16;
   LLVMTypeRef i32;
   LLVMTypeRef i64;
   LLVMTypeRef i128;
   LLVMTypeRef intptr;
   LLVMTypeRef f16;
   LLVMTypeRef f32;
   LLVMTypeRef f64;
   LLVMTypeRef v2i16;
   LLVMTypeRef v4i16;
   LLVMTypeRef v2f16;
   LLVMTypeRef v4f16;
   LLVMTypeRef v2i32;
   LLVMTypeRef v3i32;
   LLVMTypeRef v4i32;
   LLVMTypeRef v2f32;
   LLVMTypeRef v3f32;
   LLVMTypeRef v4f32;
   LLVMTypeRef v8i32;
   LLVMTypeRef iN_wavemask;
   LLVMTypeRef iN_ballotmask;

   LLVMValueRef i8_0;
   LLVMValueRef i8_1;
   LLVMValueRef i16_0;
   LLVMValueRef i16_1;
   LLVMValueRef i32_0;
   LLVMValueRef i32_1;
   LLVMValueRef i64_0;
   LLVMValueRef i64_1;
   LLVMValueRef i128_0;
   LLVMValueRef i128_1;
   LLVMValueRef f16_0;
   LLVMValueRef f16_1;
   LLVMValueRef f32_0;
   LLVMValueRef f32_1;
   LLVMValueRef f64_0;
   LLVMValueRef f64_1;
   LLVMValueRef i1true;
   LLVMValueRef i1false;

   /* Temporary helper to implement demote_to_helper:
    * True = live lanes
    * False = demoted lanes
    */
   LLVMValueRef postponed_kill;
   bool conditional_demote_seen;

   /* Since ac_nir_translate makes a local copy of ac_llvm_context, there
    * are two ac_llvm_contexts. Declare a pointer here, so that the control
    * flow stack is shared by both ac_llvm_contexts.
    */
   struct ac_llvm_flow_state *flow;

   unsigned range_md_kind;
   unsigned invariant_load_md_kind;
   unsigned uniform_md_kind;
   LLVMValueRef empty_md;

   enum chip_class chip_class;
   enum radeon_family family;
   const struct radeon_info *info;

   unsigned wave_size;
   unsigned ballot_mask_bits;

   unsigned float_mode;

   LLVMValueRef lds;
};

void ac_llvm_context_init(struct ac_llvm_context *ctx, struct ac_llvm_compiler *compiler,
                          enum chip_class chip_class, enum radeon_family family,
                          const struct radeon_info *info,
                          enum ac_float_mode float_mode, unsigned wave_size,
                          unsigned ballot_mask_bits);

void ac_llvm_context_dispose(struct ac_llvm_context *ctx);
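/*
 * Illustrative sketch, not part of the API: a typical context lifetime.
 * "compiler" and "info" are assumed to be set up by the caller, and the
 * chip/family/float-mode values below are just example enum values from the
 * companion headers:
 *
 *    struct ac_llvm_context ac;
 *    ac_llvm_context_init(&ac, compiler, GFX10, CHIP_NAVI10, info,
 *                         AC_FLOAT_MODE_DEFAULT, 64, 64);
 *    ... emit IR with the helpers below ...
 *    ac_llvm_context_dispose(&ac);
 */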

int ac_get_llvm_num_components(LLVMValueRef value);

int ac_get_elem_bits(struct ac_llvm_context *ctx, LLVMTypeRef type);

LLVMValueRef ac_llvm_extract_elem(struct ac_llvm_context *ac, LLVMValueRef value, int index);

unsigned ac_get_type_size(LLVMTypeRef type);

LLVMTypeRef ac_to_integer_type(struct ac_llvm_context *ctx, LLVMTypeRef t);
LLVMValueRef ac_to_integer(struct ac_llvm_context *ctx, LLVMValueRef v);
LLVMValueRef ac_to_integer_or_pointer(struct ac_llvm_context *ctx, LLVMValueRef v);
LLVMTypeRef ac_to_float_type(struct ac_llvm_context *ctx, LLVMTypeRef t);
LLVMValueRef ac_to_float(struct ac_llvm_context *ctx, LLVMValueRef v);

LLVMValueRef ac_build_intrinsic(struct ac_llvm_context *ctx, const char *name,
                                LLVMTypeRef return_type, LLVMValueRef *params, unsigned param_count,
                                unsigned attrib_mask);

void ac_build_type_name_for_intr(LLVMTypeRef type, char *buf, unsigned bufsize);
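/*
 * Illustrative sketch, not part of the API: ac_build_type_name_for_intr()
 * produces the overload suffix ("f32", "v4f32", ...) used to mangle LLVM
 * intrinsic names for ac_build_intrinsic(). AC_FUNC_ATTR_READNONE is assumed
 * to come from ac_llvm_util.h and "src" is a caller-provided f32 value:
 *
 *    char type[8], name[64];
 *    ac_build_type_name_for_intr(ctx->f32, type, sizeof(type));
 *    snprintf(name, sizeof(name), "llvm.amdgcn.fract.%s", type);
 *    LLVMValueRef r =
 *       ac_build_intrinsic(ctx, name, ctx->f32, &src, 1, AC_FUNC_ATTR_READNONE);
 */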

LLVMValueRef ac_build_phi(struct ac_llvm_context *ctx, LLVMTypeRef type, unsigned count_incoming,
                          LLVMValueRef *values, LLVMBasicBlockRef *blocks);

void ac_build_s_barrier(struct ac_llvm_context *ctx);
void ac_build_optimization_barrier(struct ac_llvm_context *ctx, LLVMValueRef *pgpr, bool sgpr);

LLVMValueRef ac_build_shader_clock(struct ac_llvm_context *ctx, nir_scope scope);

LLVMValueRef ac_build_ballot(struct ac_llvm_context *ctx, LLVMValueRef value);
LLVMValueRef ac_get_i1_sgpr_mask(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_all(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_any(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_eq(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_varying_gather_values(struct ac_llvm_context *ctx, LLVMValueRef *values,
                                            unsigned value_count, unsigned component);

LLVMValueRef ac_build_gather_values_extended(struct ac_llvm_context *ctx, LLVMValueRef *values,
                                             unsigned value_count, unsigned value_stride, bool load,
                                             bool always_vector);
LLVMValueRef ac_build_gather_values(struct ac_llvm_context *ctx, LLVMValueRef *values,
                                    unsigned value_count);

LLVMValueRef ac_build_concat(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);

LLVMValueRef ac_extract_components(struct ac_llvm_context *ctx, LLVMValueRef value, unsigned start,
                                   unsigned channels);

LLVMValueRef ac_build_expand(struct ac_llvm_context *ctx, LLVMValueRef value,
                             unsigned src_channels, unsigned dst_channels);

LLVMValueRef ac_build_expand_to_vec4(struct ac_llvm_context *ctx, LLVMValueRef value,
                                     unsigned num_channels);
LLVMValueRef ac_build_round(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_fdiv(struct ac_llvm_context *ctx, LLVMValueRef num, LLVMValueRef den);

LLVMValueRef ac_build_fast_udiv(struct ac_llvm_context *ctx, LLVMValueRef num,
                                LLVMValueRef multiplier, LLVMValueRef pre_shift,
                                LLVMValueRef post_shift, LLVMValueRef increment);
LLVMValueRef ac_build_fast_udiv_nuw(struct ac_llvm_context *ctx, LLVMValueRef num,
                                    LLVMValueRef multiplier, LLVMValueRef pre_shift,
                                    LLVMValueRef post_shift, LLVMValueRef increment);
LLVMValueRef ac_build_fast_udiv_u31_d_not_one(struct ac_llvm_context *ctx, LLVMValueRef num,
                                              LLVMValueRef multiplier, LLVMValueRef post_shift);
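/*
 * Illustrative sketch, not part of the API: the multiplier/pre_shift/
 * post_shift/increment operands are precomputed "magic division" constants.
 * One assumed way to obtain them is util_compute_fast_udiv_info() from
 * src/util/fast_idiv_by_const.h, for a 32-bit divisor D known up front:
 *
 *    struct util_fast_udiv_info m = util_compute_fast_udiv_info(D, 32, 32);
 *    LLVMValueRef quotient =
 *       ac_build_fast_udiv(ctx, num,
 *                          LLVMConstInt(ctx->i32, m.multiplier, 0),
 *                          LLVMConstInt(ctx->i32, m.pre_shift, 0),
 *                          LLVMConstInt(ctx->i32, m.post_shift, 0),
 *                          LLVMConstInt(ctx->i32, m.increment, 0));
 */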

void ac_prepare_cube_coords(struct ac_llvm_context *ctx, bool is_deriv, bool is_array, bool is_lod,
                            LLVMValueRef *coords_arg, LLVMValueRef *derivs_arg);

LLVMValueRef ac_build_fs_interp(struct ac_llvm_context *ctx, LLVMValueRef llvm_chan,
                                LLVMValueRef attr_number, LLVMValueRef params, LLVMValueRef i,
                                LLVMValueRef j);

LLVMValueRef ac_build_fs_interp_f16(struct ac_llvm_context *ctx, LLVMValueRef llvm_chan,
                                    LLVMValueRef attr_number, LLVMValueRef params, LLVMValueRef i,
                                    LLVMValueRef j, bool high_16bits);

LLVMValueRef ac_build_fs_interp_mov(struct ac_llvm_context *ctx, LLVMValueRef parameter,
                                    LLVMValueRef llvm_chan, LLVMValueRef attr_number,
                                    LLVMValueRef params);

LLVMValueRef ac_build_gep_ptr(struct ac_llvm_context *ctx, LLVMValueRef base_ptr,
                              LLVMValueRef index);

LLVMValueRef ac_build_gep0(struct ac_llvm_context *ctx, LLVMValueRef base_ptr, LLVMValueRef index);
LLVMValueRef ac_build_pointer_add(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                                  LLVMValueRef index);

void ac_build_indexed_store(struct ac_llvm_context *ctx, LLVMValueRef base_ptr, LLVMValueRef index,
                            LLVMValueRef value);

LLVMValueRef ac_build_load(struct ac_llvm_context *ctx, LLVMValueRef base_ptr, LLVMValueRef index);
LLVMValueRef ac_build_load_invariant(struct ac_llvm_context *ctx, LLVMValueRef base_ptr,
                                     LLVMValueRef index);
LLVMValueRef ac_build_load_to_sgpr(struct ac_llvm_context *ctx, LLVMValueRef base_ptr,
                                   LLVMValueRef index);
LLVMValueRef ac_build_load_to_sgpr_uint_wraparound(struct ac_llvm_context *ctx,
                                                   LLVMValueRef base_ptr, LLVMValueRef index);

void ac_build_buffer_store_dword(struct ac_llvm_context *ctx, LLVMValueRef rsrc, LLVMValueRef vdata,
                                 unsigned num_channels, LLVMValueRef voffset, LLVMValueRef soffset,
                                 unsigned inst_offset, unsigned cache_policy);

void ac_build_buffer_store_format(struct ac_llvm_context *ctx, LLVMValueRef rsrc, LLVMValueRef data,
                                  LLVMValueRef vindex, LLVMValueRef voffset, unsigned cache_policy);

LLVMValueRef ac_build_buffer_load(struct ac_llvm_context *ctx, LLVMValueRef rsrc, int num_channels,
                                  LLVMValueRef vindex, LLVMValueRef voffset, LLVMValueRef soffset,
                                  unsigned inst_offset, LLVMTypeRef channel_type,
                                  unsigned cache_policy, bool can_speculate, bool allow_smem);
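/*
 * Illustrative sketch, not part of the API: loading a vec4 of dwords from a
 * buffer resource and storing it back 16 bytes further on. "rsrc" and
 * "voffset" are assumed to be built by the caller, and NULL is assumed to be
 * accepted for the unused vindex/soffset operands:
 *
 *    LLVMValueRef data =
 *       ac_build_buffer_load(ctx, rsrc, 4, NULL, voffset, NULL, 0,
 *                            ctx->f32, 0, true, true);
 *    ac_build_buffer_store_dword(ctx, rsrc, data, 4, voffset, NULL, 16, 0);
 */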

LLVMValueRef ac_build_buffer_load_format(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                         LLVMValueRef vindex, LLVMValueRef voffset,
                                         unsigned num_channels, unsigned cache_policy,
                                         bool can_speculate, bool d16, bool tfe);

LLVMValueRef ac_build_tbuffer_load_short(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                         LLVMValueRef voffset, LLVMValueRef soffset,
                                         LLVMValueRef immoffset, unsigned cache_policy);

LLVMValueRef ac_build_tbuffer_load_byte(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                        LLVMValueRef voffset, LLVMValueRef soffset,
                                        LLVMValueRef immoffset, unsigned cache_policy);

LLVMValueRef ac_build_struct_tbuffer_load(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                          LLVMValueRef vindex, LLVMValueRef voffset,
                                          LLVMValueRef soffset, LLVMValueRef immoffset,
                                          unsigned num_channels, unsigned dfmt, unsigned nfmt,
                                          unsigned cache_policy, bool can_speculate);

LLVMValueRef ac_build_raw_tbuffer_load(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                       LLVMValueRef voffset, LLVMValueRef soffset,
                                       LLVMValueRef immoffset, unsigned num_channels, unsigned dfmt,
                                       unsigned nfmt, unsigned cache_policy, bool can_speculate);

LLVMValueRef ac_build_opencoded_load_format(struct ac_llvm_context *ctx, unsigned log_size,
                                            unsigned num_channels, unsigned format, bool reverse,
                                            bool known_aligned, LLVMValueRef rsrc,
                                            LLVMValueRef vindex, LLVMValueRef voffset,
                                            LLVMValueRef soffset, unsigned cache_policy,
                                            bool can_speculate);

void ac_build_tbuffer_store_short(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                  LLVMValueRef vdata, LLVMValueRef voffset, LLVMValueRef soffset,
                                  unsigned cache_policy);

void ac_build_tbuffer_store_byte(struct ac_llvm_context *ctx, LLVMValueRef rsrc, LLVMValueRef vdata,
                                 LLVMValueRef voffset, LLVMValueRef soffset, unsigned cache_policy);

void ac_build_struct_tbuffer_store(struct ac_llvm_context *ctx, LLVMValueRef rsrc,
                                   LLVMValueRef vdata, LLVMValueRef vindex, LLVMValueRef voffset,
                                   LLVMValueRef soffset, LLVMValueRef immoffset,
                                   unsigned num_channels, unsigned dfmt, unsigned nfmt,
                                   unsigned cache_policy);

void ac_build_raw_tbuffer_store(struct ac_llvm_context *ctx, LLVMValueRef rsrc, LLVMValueRef vdata,
                                LLVMValueRef voffset, LLVMValueRef soffset, LLVMValueRef immoffset,
                                unsigned num_channels, unsigned dfmt, unsigned nfmt,
                                unsigned cache_policy);

void ac_set_range_metadata(struct ac_llvm_context *ctx, LLVMValueRef value, unsigned lo,
                           unsigned hi);
LLVMValueRef ac_get_thread_id(struct ac_llvm_context *ctx);

#define AC_TID_MASK_TOP_LEFT 0xfffffffc
#define AC_TID_MASK_TOP      0xfffffffd
#define AC_TID_MASK_LEFT     0xfffffffe

LLVMValueRef ac_build_ddxy(struct ac_llvm_context *ctx, uint32_t mask, int idx, LLVMValueRef val);

#define AC_SENDMSG_GS           2
#define AC_SENDMSG_GS_DONE      3
#define AC_SENDMSG_GS_ALLOC_REQ 9

#define AC_SENDMSG_GS_OP_NOP      (0 << 4)
#define AC_SENDMSG_GS_OP_CUT      (1 << 4)
#define AC_SENDMSG_GS_OP_EMIT     (2 << 4)
#define AC_SENDMSG_GS_OP_EMIT_CUT (3 << 4)

void ac_build_sendmsg(struct ac_llvm_context *ctx, uint32_t msg, LLVMValueRef wave_id);

LLVMValueRef ac_build_imsb(struct ac_llvm_context *ctx, LLVMValueRef arg, LLVMTypeRef dst_type);

LLVMValueRef ac_build_umsb(struct ac_llvm_context *ctx, LLVMValueRef arg, LLVMTypeRef dst_type);
LLVMValueRef ac_build_fmin(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_fmax(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_imin(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_imax(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_umin(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_umax(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_clamp(struct ac_llvm_context *ctx, LLVMValueRef value);

struct ac_export_args {
   LLVMValueRef out[4];
   unsigned target;
   unsigned enabled_channels;
   bool compr;
   bool done;
   bool valid_mask;
};

void ac_build_export(struct ac_llvm_context *ctx, struct ac_export_args *a);

void ac_build_export_null(struct ac_llvm_context *ctx);

enum ac_image_opcode
{
   ac_image_sample,
   ac_image_gather4,
   ac_image_load,
   ac_image_load_mip,
   ac_image_store,
   ac_image_store_mip,
   ac_image_get_lod,
   ac_image_get_resinfo,
   ac_image_atomic,
   ac_image_atomic_cmpswap,
};

enum ac_atomic_op
{
   ac_atomic_swap,
   ac_atomic_add,
   ac_atomic_sub,
   ac_atomic_smin,
   ac_atomic_umin,
   ac_atomic_smax,
   ac_atomic_umax,
   ac_atomic_and,
   ac_atomic_or,
   ac_atomic_xor,
   ac_atomic_inc_wrap,
   ac_atomic_dec_wrap,
   ac_atomic_fmin,
   ac_atomic_fmax,
};

/* These cache policy bits match the definitions used by the LLVM intrinsics. */
enum ac_image_cache_policy
{
   ac_glc = 1 << 0,      /* per-CU cache control */
   ac_slc = 1 << 1,      /* global L2 cache control */
   ac_dlc = 1 << 2,      /* per-shader-array cache control */
   ac_swizzled = 1 << 3, /* the access is swizzled, disabling load/store merging */
};

struct ac_image_args {
   enum ac_image_opcode opcode;
   enum ac_atomic_op atomic; /* for the ac_image_atomic opcode */
   enum ac_image_dim dim;
   unsigned dmask : 4;
   unsigned cache_policy : 3;
   bool unorm : 1;
   bool level_zero : 1;
   bool d16 : 1; /* GFX8+: data and return values are 16-bit */
   bool a16 : 1; /* GFX9+: address components except compare, offset and bias are 16-bit */
   bool g16 : 1; /* GFX10+: derivatives are 16-bit; GFX<=9: must be equal to a16 */
   bool tfe : 1;
   unsigned attributes; /* additional call-site specific AC_FUNC_ATTRs */

   LLVMValueRef resource;
   LLVMValueRef sampler;
   LLVMValueRef data[2]; /* data[0] is source data (vector); data[1] is cmp for cmpswap */
   LLVMValueRef offset;
   LLVMValueRef bias;
   LLVMValueRef compare;
   LLVMValueRef derivs[6];
   LLVMValueRef coords[4];
   LLVMValueRef lod; // also used by ac_image_get_resinfo
   LLVMValueRef min_lod;
};

LLVMValueRef ac_build_image_opcode(struct ac_llvm_context *ctx, struct ac_image_args *a);
LLVMValueRef ac_build_image_get_sample_count(struct ac_llvm_context *ctx, LLVMValueRef rsrc);
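/*
 * Illustrative sketch, not part of the API: a basic 2D sample with the LOD
 * forced to zero. The resource, sampler and coordinate values are assumed to
 * be built by the caller; ac_image_2d is assumed to come from
 * ac_shader_util.h:
 *
 *    struct ac_image_args args = {0};
 *    args.opcode = ac_image_sample;
 *    args.dim = ac_image_2d;
 *    args.dmask = 0xf;
 *    args.level_zero = true;
 *    args.resource = resource;
 *    args.sampler = sampler;
 *    args.coords[0] = s;
 *    args.coords[1] = t;
 *    LLVMValueRef texel = ac_build_image_opcode(ctx, &args);
 */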
LLVMValueRef ac_build_cvt_pkrtz_f16(struct ac_llvm_context *ctx, LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_i16(struct ac_llvm_context *ctx, LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_u16(struct ac_llvm_context *ctx, LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_i16_f16(struct ac_llvm_context *ctx, LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_u16_f16(struct ac_llvm_context *ctx, LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pk_i16(struct ac_llvm_context *ctx, LLVMValueRef args[2], unsigned bits,
                                 bool hi);
LLVMValueRef ac_build_cvt_pk_u16(struct ac_llvm_context *ctx, LLVMValueRef args[2], unsigned bits,
                                 bool hi);
LLVMValueRef ac_build_wqm_vote(struct ac_llvm_context *ctx, LLVMValueRef i1);
void ac_build_kill_if_false(struct ac_llvm_context *ctx, LLVMValueRef i1);
LLVMValueRef ac_build_bfe(struct ac_llvm_context *ctx, LLVMValueRef input, LLVMValueRef offset,
                          LLVMValueRef width, bool is_signed);
LLVMValueRef ac_build_imad(struct ac_llvm_context *ctx, LLVMValueRef s0, LLVMValueRef s1,
                           LLVMValueRef s2);
LLVMValueRef ac_build_fmad(struct ac_llvm_context *ctx, LLVMValueRef s0, LLVMValueRef s1,
                           LLVMValueRef s2);

void ac_build_waitcnt(struct ac_llvm_context *ctx, unsigned wait_flags);

LLVMValueRef ac_build_fract(struct ac_llvm_context *ctx, LLVMValueRef src0, unsigned bitsize);
LLVMValueRef ac_const_uint_vec(struct ac_llvm_context *ctx, LLVMTypeRef type, uint64_t value);
LLVMValueRef ac_build_isign(struct ac_llvm_context *ctx, LLVMValueRef src0);
LLVMValueRef ac_build_fsign(struct ac_llvm_context *ctx, LLVMValueRef src);
LLVMValueRef ac_build_bit_count(struct ac_llvm_context *ctx, LLVMValueRef src0);

LLVMValueRef ac_build_fsat(struct ac_llvm_context *ctx, LLVMValueRef src,
                           LLVMTypeRef type);

LLVMValueRef ac_build_bitfield_reverse(struct ac_llvm_context *ctx, LLVMValueRef src0);

void ac_optimize_vs_outputs(struct ac_llvm_context *ac, LLVMValueRef main_fn,
                            uint8_t *vs_output_param_offset, uint32_t num_outputs,
                            uint32_t skip_output_mask, uint8_t *num_param_exports);
void ac_init_exec_full_mask(struct ac_llvm_context *ctx);

void ac_declare_lds_as_pointer(struct ac_llvm_context *ac);
LLVMValueRef ac_lds_load(struct ac_llvm_context *ctx, LLVMValueRef dw_addr);
void ac_lds_store(struct ac_llvm_context *ctx, LLVMValueRef dw_addr, LLVMValueRef value);

LLVMValueRef ac_find_lsb(struct ac_llvm_context *ctx, LLVMTypeRef dst_type, LLVMValueRef src0);

LLVMTypeRef ac_array_in_const_addr_space(LLVMTypeRef elem_type);
LLVMTypeRef ac_array_in_const32_addr_space(LLVMTypeRef elem_type);

void ac_build_bgnloop(struct ac_llvm_context *ctx, int label_id);
void ac_build_break(struct ac_llvm_context *ctx);
void ac_build_continue(struct ac_llvm_context *ctx);
void ac_build_else(struct ac_llvm_context *ctx, int label_id);
void ac_build_endif(struct ac_llvm_context *ctx, int label_id);
void ac_build_endloop(struct ac_llvm_context *ctx, int label_id);
void ac_build_ifcc(struct ac_llvm_context *ctx, LLVMValueRef cond, int label_id);
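/*
 * Illustrative sketch, not part of the API: the structured control-flow
 * helpers are paired by a caller-chosen label id (the value is arbitrary,
 * it only has to match within one construct):
 *
 *    ac_build_ifcc(ctx, cond, 1000);
 *    ... emit the "then" side ...
 *    ac_build_else(ctx, 1000);
 *    ... emit the "else" side ...
 *    ac_build_endif(ctx, 1000);
 */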

LLVMValueRef ac_build_alloca(struct ac_llvm_context *ac, LLVMTypeRef type, const char *name);
LLVMValueRef ac_build_alloca_undef(struct ac_llvm_context *ac, LLVMTypeRef type, const char *name);
LLVMValueRef ac_build_alloca_init(struct ac_llvm_context *ac, LLVMValueRef val, const char *name);

LLVMValueRef ac_cast_ptr(struct ac_llvm_context *ctx, LLVMValueRef ptr, LLVMTypeRef type);

LLVMValueRef ac_trim_vector(struct ac_llvm_context *ctx, LLVMValueRef value, unsigned count);

LLVMValueRef ac_unpack_param(struct ac_llvm_context *ctx, LLVMValueRef param, unsigned rshift,
                             unsigned bitwidth);

void ac_apply_fmask_to_sample(struct ac_llvm_context *ac, LLVMValueRef fmask, LLVMValueRef *addr,
                              bool is_array_tex);

LLVMValueRef ac_build_ds_swizzle(struct ac_llvm_context *ctx, LLVMValueRef src, unsigned mask);

LLVMValueRef ac_build_readlane_no_opt_barrier(struct ac_llvm_context *ctx, LLVMValueRef src,
                                              LLVMValueRef lane);

LLVMValueRef ac_build_readlane(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef lane);

LLVMValueRef ac_build_writelane(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef value,
                                LLVMValueRef lane);

LLVMValueRef ac_build_mbcnt_add(struct ac_llvm_context *ctx, LLVMValueRef mask, LLVMValueRef add_src);
LLVMValueRef ac_build_mbcnt(struct ac_llvm_context *ctx, LLVMValueRef mask);
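/*
 * Illustrative sketch, not part of the API: ac_build_mbcnt() counts the set
 * bits of "mask" below the current lane, so with an all-ones wave mask it is
 * assumed to yield the lane index within the wave (essentially what
 * ac_get_thread_id() returns):
 *
 *    LLVMValueRef all = LLVMConstInt(ctx->iN_wavemask, ~0ull, 0);
 *    LLVMValueRef lane = ac_build_mbcnt(ctx, all);
 */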

LLVMValueRef ac_build_inclusive_scan(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op);

LLVMValueRef ac_build_exclusive_scan(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op);

LLVMValueRef ac_build_reduce(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op,
                             unsigned cluster_size);

/**
 * Common arguments for a scan/reduce operation that accumulates per-wave
 * values across an entire workgroup, while respecting the order of waves.
 */
struct ac_wg_scan {
   bool enable_reduce;
   bool enable_exclusive;
   bool enable_inclusive;
   nir_op op;
   LLVMValueRef src; /* clobbered! */
   LLVMValueRef result_reduce;
   LLVMValueRef result_exclusive;
   LLVMValueRef result_inclusive;
   LLVMValueRef extra;
   LLVMValueRef waveidx;
   LLVMValueRef numwaves; /* only needed for "reduce" operations */

   /* T addrspace(LDS) pointer to the same type as value, at least maxwaves entries */
   LLVMValueRef scratch;
   unsigned maxwaves;
};

void ac_build_wg_wavescan_top(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void ac_build_wg_wavescan_bottom(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void ac_build_wg_wavescan(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);

void ac_build_wg_scan_top(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void ac_build_wg_scan_bottom(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void ac_build_wg_scan(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
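/*
 * Illustrative sketch, not part of the API: an inclusive iadd scan across a
 * workgroup. "lds_scratch" (an LDS pointer with at least maxwaves entries)
 * and "wave_id" are assumed to be provided by the caller:
 *
 *    struct ac_wg_scan ws = {0};
 *    ws.op = nir_op_iadd;
 *    ws.enable_inclusive = true;
 *    ws.src = value;
 *    ws.scratch = lds_scratch;
 *    ws.waveidx = wave_id;
 *    ws.maxwaves = 32;
 *    ac_build_wg_scan(ctx, &ws);
 *    LLVMValueRef prefix_sum = ws.result_inclusive;
 */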

LLVMValueRef ac_build_quad_swizzle(struct ac_llvm_context *ctx, LLVMValueRef src, unsigned lane0,
                                   unsigned lane1, unsigned lane2, unsigned lane3);

LLVMValueRef ac_build_shuffle(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef index);

LLVMValueRef ac_build_frexp_exp(struct ac_llvm_context *ctx, LLVMValueRef src0, unsigned bitsize);

LLVMValueRef ac_build_frexp_mant(struct ac_llvm_context *ctx, LLVMValueRef src0, unsigned bitsize);

LLVMValueRef ac_build_canonicalize(struct ac_llvm_context *ctx, LLVMValueRef src0,
                                   unsigned bitsize);

LLVMValueRef ac_build_ddxy_interp(struct ac_llvm_context *ctx, LLVMValueRef interp_ij);

LLVMValueRef ac_build_load_helper_invocation(struct ac_llvm_context *ctx);

LLVMValueRef ac_build_is_helper_invocation(struct ac_llvm_context *ctx);

LLVMValueRef ac_build_call(struct ac_llvm_context *ctx, LLVMValueRef func, LLVMValueRef *args,
                           unsigned num_args);

LLVMValueRef ac_build_atomic_rmw(struct ac_llvm_context *ctx, LLVMAtomicRMWBinOp op,
                                 LLVMValueRef ptr, LLVMValueRef val, const char *sync_scope);

LLVMValueRef ac_build_atomic_cmp_xchg(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                                      LLVMValueRef cmp, LLVMValueRef val, const char *sync_scope);

void ac_export_mrt_z(struct ac_llvm_context *ctx, LLVMValueRef depth, LLVMValueRef stencil,
                     LLVMValueRef samplemask, struct ac_export_args *args);

void ac_build_sendmsg_gs_alloc_req(struct ac_llvm_context *ctx, LLVMValueRef wave_id,
                                   LLVMValueRef vtx_cnt, LLVMValueRef prim_cnt);

struct ac_ngg_prim {
   unsigned num_vertices;
   LLVMValueRef isnull;
   LLVMValueRef index[3];
   LLVMValueRef edgeflags;
   LLVMValueRef passthrough;
};

LLVMValueRef ac_pack_edgeflags_for_export(struct ac_llvm_context *ctx,
                                          const struct ac_shader_args *args);
LLVMValueRef ac_pack_prim_export(struct ac_llvm_context *ctx, const struct ac_ngg_prim *prim);
void ac_build_export_prim(struct ac_llvm_context *ctx, const struct ac_ngg_prim *prim);

static inline LLVMValueRef ac_get_arg(struct ac_llvm_context *ctx, struct ac_arg arg)
{
   assert(arg.used);
   return LLVMGetParam(ctx->main_function, arg.arg_index);
}

enum ac_llvm_calling_convention
{
   AC_LLVM_AMDGPU_VS = 87,
   AC_LLVM_AMDGPU_GS = 88,
   AC_LLVM_AMDGPU_PS = 89,
   AC_LLVM_AMDGPU_CS = 90,
   AC_LLVM_AMDGPU_HS = 93,
};

LLVMValueRef ac_build_main(const struct ac_shader_args *args, struct ac_llvm_context *ctx,
                           enum ac_llvm_calling_convention convention, const char *name,
                           LLVMTypeRef ret_type, LLVMModuleRef module);
void ac_build_s_endpgm(struct ac_llvm_context *ctx);
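/*
 * Illustrative sketch, not part of the API: building a compute-shader main
 * function and reading one of its prologue arguments. The "args" setup and
 * the local_invocation_ids field are assumptions about ac_shader_args.h:
 *
 *    LLVMValueRef main_fn = ac_build_main(args, ctx, AC_LLVM_AMDGPU_CS,
 *                                         "main", ctx->voidt, ctx->module);
 *    LLVMValueRef ids = ac_get_arg(ctx, args->local_invocation_ids);
 *    ... emit the shader body ...
 *    ac_build_s_endpgm(ctx);
 */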

void ac_build_triangle_strip_indices_to_triangle(struct ac_llvm_context *ctx, LLVMValueRef is_odd,
                                                 LLVMValueRef flatshade_first,
                                                 LLVMValueRef index[3]);

#ifdef __cplusplus
}
#endif

#endif