/*
 * Copyright © 2013 Rob Clark <robdclark@gmail.com>
 * SPDX-License-Identifier: MIT
 */

#ifndef INSTR_A3XX_H_
#define INSTR_A3XX_H_

#define PACKED __attribute__((__packed__))

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* clang-format off */
void ir3_assert_handler(const char *expr, const char *file, int line,
                        const char *func) __attribute__((weak)) __attribute__((__noreturn__));
/* clang-format on */

/* A wrapper for assert() that allows overriding handling of a failed
 * assert.  This is needed for tools like crashdec, which may want to
 * attempt to disassemble memory that might not actually contain valid
 * instructions.
 */
#define ir3_assert(expr)                                                       \
   do {                                                                        \
      if (!(expr)) {                                                           \
         if (ir3_assert_handler) {                                             \
            ir3_assert_handler(#expr, __FILE__, __LINE__, __func__);           \
         }                                                                     \
         assert(expr);                                                         \
      }                                                                        \
   } while (0)
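
/* Illustrative sketch (editorial note, not part of the original header): a
 * tool such as crashdec can override the weak ir3_assert_handler so that a
 * failed ir3_assert() is reported instead of aborting the whole process.
 * Assuming a hypothetical non-returning tool_fatal() helper, an override
 * might look like:
 *
 *    void
 *    ir3_assert_handler(const char *expr, const char *file, int line,
 *                       const char *func)
 *    {
 *       tool_fatal("assert failed: %s at %s:%d (%s)", expr, file, line, func);
 *    }
 */
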
/* size of largest OPC field of all the instruction categories: */
#define NOPC_BITS 7

#define _OPC(cat, opc) (((cat) << NOPC_BITS) | opc)

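/* Worked example (editorial note, not in the original header): _OPC() packs
 * the category number above the 7-bit opcode field, so e.g.
 *
 *    _OPC(2, 5) == (2 << 7) | 5 == 0x105
 *
 * which is the encoding used below for OPC_CMPS_F.
 */
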
/* clang-format off */
typedef enum {
   /* category 0: */
   OPC_NOP             = _OPC(0, 0),
   OPC_JUMP            = _OPC(0, 2),
   OPC_CALL            = _OPC(0, 3),
   OPC_RET             = _OPC(0, 4),
   OPC_KILL            = _OPC(0, 5),
   OPC_END             = _OPC(0, 6),
   OPC_EMIT            = _OPC(0, 7),
   OPC_CUT             = _OPC(0, 8),
   OPC_CHMASK          = _OPC(0, 9),
   OPC_CHSH            = _OPC(0, 10),
   OPC_FLOW_REV        = _OPC(0, 11),

   OPC_BKT             = _OPC(0, 16),
   OPC_STKS            = _OPC(0, 17),
   OPC_STKR            = _OPC(0, 18),
   OPC_XSET            = _OPC(0, 19),
   OPC_XCLR            = _OPC(0, 20),
   OPC_GETONE          = _OPC(0, 21),
   OPC_DBG             = _OPC(0, 22),
   OPC_SHPS            = _OPC(0, 23),   /* shader prologue start */
   OPC_SHPE            = _OPC(0, 24),   /* shader prologue end */
   OPC_GETLAST         = _OPC(0, 25),

   OPC_PREDT           = _OPC(0, 29),   /* predicated true */
   OPC_PREDF           = _OPC(0, 30),   /* predicated false */
   OPC_PREDE           = _OPC(0, 31),   /* predicated end */

   /* Logical opcodes for different branch instruction variations: */
   OPC_BR              = _OPC(0, 40),
   OPC_BRAO            = _OPC(0, 41),
   OPC_BRAA            = _OPC(0, 42),
   OPC_BRAC            = _OPC(0, 43),
   OPC_BANY            = _OPC(0, 44),
   OPC_BALL            = _OPC(0, 45),
   OPC_BRAX            = _OPC(0, 46),

   /* Logical opcode to distinguish kill and demote */
   OPC_DEMOTE          = _OPC(0, 47),

   /* category 1: */
   OPC_MOV             = _OPC(1, 0),
   OPC_MOVP            = _OPC(1, 1),
   /* swz, gat, sct */
   OPC_MOVMSK          = _OPC(1, 3),

   /* Virtual opcodes for instructions differentiated via a "sub-opcode" that
    * replaces the repeat field:
    */
   OPC_SWZ             = _OPC(1, 4),
   OPC_GAT             = _OPC(1, 5),
   OPC_SCT             = _OPC(1, 6),

   /* Logical opcodes for different variants of mov: */
   OPC_MOV_IMMED       = _OPC(1, 40),
   OPC_MOV_CONST       = _OPC(1, 41),
   OPC_MOV_GPR         = _OPC(1, 42),
   OPC_MOV_RELGPR      = _OPC(1, 43),
   OPC_MOV_RELCONST    = _OPC(1, 44),

   /* Macros that expand to an if statement + move */
   OPC_BALLOT_MACRO    = _OPC(1, 50),
   OPC_ANY_MACRO       = _OPC(1, 51),
   OPC_ALL_MACRO       = _OPC(1, 52),
   OPC_ELECT_MACRO     = _OPC(1, 53),
   OPC_READ_COND_MACRO = _OPC(1, 54),
   OPC_READ_FIRST_MACRO = _OPC(1, 55),
   OPC_SHPS_MACRO       = _OPC(1, 56),
   OPC_READ_GETLAST_MACRO = _OPC(1, 57),

   /* Macros that expand to a loop */
   OPC_SCAN_MACRO      = _OPC(1, 58),
   OPC_SCAN_CLUSTERS_MACRO = _OPC(1, 60),

   /* category 2: */
   OPC_ADD_F           = _OPC(2, 0),
   OPC_MIN_F           = _OPC(2, 1),
   OPC_MAX_F           = _OPC(2, 2),
   OPC_MUL_F           = _OPC(2, 3),
   OPC_SIGN_F          = _OPC(2, 4),
   OPC_CMPS_F          = _OPC(2, 5),
   OPC_ABSNEG_F        = _OPC(2, 6),
   OPC_CMPV_F          = _OPC(2, 7),
   /* 8 - invalid */
   OPC_FLOOR_F         = _OPC(2, 9),
   OPC_CEIL_F          = _OPC(2, 10),
   OPC_RNDNE_F         = _OPC(2, 11),
   OPC_RNDAZ_F         = _OPC(2, 12),
   OPC_TRUNC_F         = _OPC(2, 13),
   /* 14-15 - invalid */
   OPC_ADD_U           = _OPC(2, 16),
   OPC_ADD_S           = _OPC(2, 17),
   OPC_SUB_U           = _OPC(2, 18),
   OPC_SUB_S           = _OPC(2, 19),
   OPC_CMPS_U          = _OPC(2, 20),
   OPC_CMPS_S          = _OPC(2, 21),
   OPC_MIN_U           = _OPC(2, 22),
   OPC_MIN_S           = _OPC(2, 23),
   OPC_MAX_U           = _OPC(2, 24),
   OPC_MAX_S           = _OPC(2, 25),
   OPC_ABSNEG_S        = _OPC(2, 26),
   /* 27 - invalid */
   OPC_AND_B           = _OPC(2, 28),
   OPC_OR_B            = _OPC(2, 29),
   OPC_NOT_B           = _OPC(2, 30),
   OPC_XOR_B           = _OPC(2, 31),
   /* 32 - invalid */
   OPC_CMPV_U          = _OPC(2, 33),
   OPC_CMPV_S          = _OPC(2, 34),
   /* 35-47 - invalid */
   OPC_MUL_U24         = _OPC(2, 48), /* 24b mul into 32b result */
   OPC_MUL_S24         = _OPC(2, 49), /* 24b mul into 32b result with sign extension */
   OPC_MULL_U          = _OPC(2, 50),
   OPC_BFREV_B         = _OPC(2, 51),
   OPC_CLZ_S           = _OPC(2, 52),
   OPC_CLZ_B           = _OPC(2, 53),
   OPC_SHL_B           = _OPC(2, 54),
   OPC_SHR_B           = _OPC(2, 55),
   OPC_ASHR_B          = _OPC(2, 56),
   OPC_BARY_F          = _OPC(2, 57),
   OPC_MGEN_B          = _OPC(2, 58),
   OPC_GETBIT_B        = _OPC(2, 59),
   OPC_SETRM           = _OPC(2, 60),
   OPC_CBITS_B         = _OPC(2, 61),
   OPC_SHB             = _OPC(2, 62),
   OPC_MSAD            = _OPC(2, 63),
   OPC_FLAT_B          = _OPC(2, 64),

   /* category 3: */
   OPC_MAD_U16         = _OPC(3, 0),
   OPC_MADSH_U16       = _OPC(3, 1),
   OPC_MAD_S16         = _OPC(3, 2),
   OPC_MADSH_M16       = _OPC(3, 3),   /* should this be .s16? */
   OPC_MAD_U24         = _OPC(3, 4),
   OPC_MAD_S24         = _OPC(3, 5),
   OPC_MAD_F16         = _OPC(3, 6),
   OPC_MAD_F32         = _OPC(3, 7),
   OPC_SEL_B16         = _OPC(3, 8),
   OPC_SEL_B32         = _OPC(3, 9),
   OPC_SEL_S16         = _OPC(3, 10),
   OPC_SEL_S32         = _OPC(3, 11),
   OPC_SEL_F16         = _OPC(3, 12),
   OPC_SEL_F32         = _OPC(3, 13),
   OPC_SAD_S16         = _OPC(3, 14),
   OPC_SAD_S32         = _OPC(3, 15),
   OPC_SHRM            = _OPC(3, 16),
   OPC_SHLM            = _OPC(3, 17),
   OPC_SHRG            = _OPC(3, 18),
   OPC_SHLG            = _OPC(3, 19),
   OPC_ANDG            = _OPC(3, 20),
   OPC_DP2ACC          = _OPC(3, 21),
   OPC_DP4ACC          = _OPC(3, 22),
   OPC_WMM             = _OPC(3, 23),
   OPC_WMM_ACCU        = _OPC(3, 24),

   /* category 4: */
   OPC_RCP             = _OPC(4, 0),
   OPC_RSQ             = _OPC(4, 1),
   OPC_LOG2            = _OPC(4, 2),
   OPC_EXP2            = _OPC(4, 3),
   OPC_SIN             = _OPC(4, 4),
   OPC_COS             = _OPC(4, 5),
   OPC_SQRT            = _OPC(4, 6),
   /* NOTE that these are 8+opc from their highp equivs, so it's possible
    * that the high order bit in the opc field has been repurposed for
    * half-precision use?  But note that other ops (rcp/lsin/cos/sqrt)
    * still use the same opc as highp
    */
   OPC_HRSQ            = _OPC(4, 9),
   OPC_HLOG2           = _OPC(4, 10),
   OPC_HEXP2           = _OPC(4, 11),

   /* category 5: */
   OPC_ISAM            = _OPC(5, 0),
   OPC_ISAML           = _OPC(5, 1),
   OPC_ISAMM           = _OPC(5, 2),
   OPC_SAM             = _OPC(5, 3),
   OPC_SAMB            = _OPC(5, 4),
   OPC_SAML            = _OPC(5, 5),
   OPC_SAMGQ           = _OPC(5, 6),
   OPC_GETLOD          = _OPC(5, 7),
   OPC_CONV            = _OPC(5, 8),
   OPC_CONVM           = _OPC(5, 9),
   OPC_GETSIZE         = _OPC(5, 10),
   OPC_GETBUF          = _OPC(5, 11),
   OPC_GETPOS          = _OPC(5, 12),
   OPC_GETINFO         = _OPC(5, 13),
   OPC_DSX             = _OPC(5, 14),
   OPC_DSY             = _OPC(5, 15),
   OPC_GATHER4R        = _OPC(5, 16),
   OPC_GATHER4G        = _OPC(5, 17),
   OPC_GATHER4B        = _OPC(5, 18),
   OPC_GATHER4A        = _OPC(5, 19),
   OPC_SAMGP0          = _OPC(5, 20),
   OPC_SAMGP1          = _OPC(5, 21),
   OPC_SAMGP2          = _OPC(5, 22),
   OPC_SAMGP3          = _OPC(5, 23),
   OPC_DSXPP_1         = _OPC(5, 24),
   OPC_DSYPP_1         = _OPC(5, 25),
   OPC_RGETPOS         = _OPC(5, 26),
   OPC_RGETINFO        = _OPC(5, 27),
   OPC_BRCST_ACTIVE    = _OPC(5, 28),
   OPC_QUAD_SHUFFLE_BRCST  = _OPC(5, 29),
   OPC_QUAD_SHUFFLE_HORIZ  = _OPC(5, 30),
   OPC_QUAD_SHUFFLE_VERT   = _OPC(5, 31),
   OPC_QUAD_SHUFFLE_DIAG   = _OPC(5, 32),
   OPC_TCINV               = _OPC(5, 33),
   /* cat5 meta instructions, placed above the cat5 opc field's size */
   OPC_DSXPP_MACRO     = _OPC(5, 35),
   OPC_DSYPP_MACRO     = _OPC(5, 36),

   /* category 6: */
   OPC_LDG             = _OPC(6, 0),        /* load-global */
   OPC_LDL             = _OPC(6, 1),
   OPC_LDP             = _OPC(6, 2),
   OPC_STG             = _OPC(6, 3),        /* store-global */
   OPC_STL             = _OPC(6, 4),
   OPC_STP             = _OPC(6, 5),
   OPC_LDIB            = _OPC(6, 6),
   OPC_G2L             = _OPC(6, 7),
   OPC_L2G             = _OPC(6, 8),
   OPC_PREFETCH        = _OPC(6, 9),
   OPC_LDLW            = _OPC(6, 10),
   OPC_STLW            = _OPC(6, 11),
   OPC_RESFMT          = _OPC(6, 14),
   OPC_RESINFO         = _OPC(6, 15),
   OPC_ATOMIC_ADD      = _OPC(6, 16),
   OPC_ATOMIC_SUB      = _OPC(6, 17),
   OPC_ATOMIC_XCHG     = _OPC(6, 18),
   OPC_ATOMIC_INC      = _OPC(6, 19),
   OPC_ATOMIC_DEC      = _OPC(6, 20),
   OPC_ATOMIC_CMPXCHG  = _OPC(6, 21),
   OPC_ATOMIC_MIN      = _OPC(6, 22),
   OPC_ATOMIC_MAX      = _OPC(6, 23),
   OPC_ATOMIC_AND      = _OPC(6, 24),
   OPC_ATOMIC_OR       = _OPC(6, 25),
   OPC_ATOMIC_XOR      = _OPC(6, 26),
   OPC_LDGB            = _OPC(6, 27),
   OPC_STGB            = _OPC(6, 28),
   OPC_STIB            = _OPC(6, 29),
   OPC_LDC             = _OPC(6, 30),
   OPC_LDLV            = _OPC(6, 31),
   OPC_PIPR            = _OPC(6, 32), /* ??? */
   OPC_PIPC            = _OPC(6, 33), /* ??? */
   OPC_EMIT2           = _OPC(6, 34), /* ??? */
   OPC_ENDLS           = _OPC(6, 35), /* ??? */
   OPC_GETSPID         = _OPC(6, 36), /* SP ID */
   OPC_GETWID          = _OPC(6, 37), /* wavefront ID */
   OPC_GETFIBERID      = _OPC(6, 38), /* fiber ID */
   OPC_SHFL            = _OPC(6, 39),

   /* Logical opcodes for things that differ in a6xx+ */
   OPC_STC             = _OPC(6, 40),
   OPC_RESINFO_B       = _OPC(6, 41),
   OPC_LDIB_B          = _OPC(6, 42),
   OPC_STIB_B          = _OPC(6, 43),

   /* Logical opcodes for different atomic instruction variations: */
   OPC_ATOMIC_B_ADD      = _OPC(6, 44),
   OPC_ATOMIC_B_SUB      = _OPC(6, 45),
   OPC_ATOMIC_B_XCHG     = _OPC(6, 46),
   OPC_ATOMIC_B_INC      = _OPC(6, 47),
   OPC_ATOMIC_B_DEC      = _OPC(6, 48),
   OPC_ATOMIC_B_CMPXCHG  = _OPC(6, 49),
   OPC_ATOMIC_B_MIN      = _OPC(6, 50),
   OPC_ATOMIC_B_MAX      = _OPC(6, 51),
   OPC_ATOMIC_B_AND      = _OPC(6, 52),
   OPC_ATOMIC_B_OR       = _OPC(6, 53),
   OPC_ATOMIC_B_XOR      = _OPC(6, 54),

   OPC_ATOMIC_S_ADD      = _OPC(6, 55),
   OPC_ATOMIC_S_SUB      = _OPC(6, 56),
   OPC_ATOMIC_S_XCHG     = _OPC(6, 57),
   OPC_ATOMIC_S_INC      = _OPC(6, 58),
   OPC_ATOMIC_S_DEC      = _OPC(6, 59),
   OPC_ATOMIC_S_CMPXCHG  = _OPC(6, 60),
   OPC_ATOMIC_S_MIN      = _OPC(6, 61),
   OPC_ATOMIC_S_MAX      = _OPC(6, 62),
   OPC_ATOMIC_S_AND      = _OPC(6, 63),
   OPC_ATOMIC_S_OR       = _OPC(6, 64),
   OPC_ATOMIC_S_XOR      = _OPC(6, 65),

   OPC_ATOMIC_G_ADD      = _OPC(6, 66),
   OPC_ATOMIC_G_SUB      = _OPC(6, 67),
   OPC_ATOMIC_G_XCHG     = _OPC(6, 68),
   OPC_ATOMIC_G_INC      = _OPC(6, 69),
   OPC_ATOMIC_G_DEC      = _OPC(6, 70),
   OPC_ATOMIC_G_CMPXCHG  = _OPC(6, 71),
   OPC_ATOMIC_G_MIN      = _OPC(6, 72),
   OPC_ATOMIC_G_MAX      = _OPC(6, 73),
   OPC_ATOMIC_G_AND      = _OPC(6, 74),
   OPC_ATOMIC_G_OR       = _OPC(6, 75),
   OPC_ATOMIC_G_XOR      = _OPC(6, 76),

   OPC_LDG_A           = _OPC(6, 77),
   OPC_STG_A           = _OPC(6, 78),

   OPC_SPILL_MACRO     = _OPC(6, 79),
   OPC_RELOAD_MACRO    = _OPC(6, 80),

   OPC_LDC_K           = _OPC(6, 81),
   OPC_STSC            = _OPC(6, 82),
   OPC_LDG_K           = _OPC(6, 83),

   /* Macro that expands to an stsc at the start of the preamble.  It loads
    * into the const file and should not be optimized in any way.
    */
   OPC_PUSH_CONSTS_LOAD_MACRO = _OPC(6, 84),

   /* category 7: */
   OPC_BAR             = _OPC(7, 0),
   OPC_FENCE           = _OPC(7, 1),
   OPC_SLEEP           = _OPC(7, 2),
   OPC_ICINV           = _OPC(7, 3),
   OPC_DCCLN           = _OPC(7, 4),
   OPC_DCINV           = _OPC(7, 5),
   OPC_DCFLU           = _OPC(7, 6),

   OPC_LOCK            = _OPC(7, 7),
   OPC_UNLOCK          = _OPC(7, 8),

   OPC_ALIAS           = _OPC(7, 9),

   OPC_CCINV           = _OPC(7, 10),

   /* meta instructions (category 8): */
#define OPC_META 8
   /* placeholder instr to mark shader inputs: */
   OPC_META_INPUT      = _OPC(OPC_META, 0),
   /* The "collect" and "split" instructions are used for keeping
    * track of instructions that write to multiple dst registers
    * (split) like texture sample instructions, or read multiple
    * consecutive scalar registers (collect) (bary.f, texture samp)
    *
    * A "split" extracts a scalar component from a vecN, and a
    * "collect" gathers multiple scalar components into a vecN
    */
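   /* Illustrative example (editorial note, not in the original header): a
    * sam instruction producing a vec4 result is followed by OPC_META_SPLIT
    * instructions, one per scalar component that gets used, while the
    * consecutive scalar sources of a bary.f or texture sample are grouped
    * beforehand by an OPC_META_COLLECT.
    */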
   OPC_META_SPLIT      = _OPC(OPC_META, 2),
   OPC_META_COLLECT    = _OPC(OPC_META, 3),

   /* placeholder for texture fetches that run before FS invocation
    * starts:
    */
   OPC_META_TEX_PREFETCH = _OPC(OPC_META, 4),

   /* Parallel copies have multiple destinations, and copy each destination
    * from its corresponding source. This happens "in parallel," meaning that
    * it happens as-if every source is read first and then every destination
    * is stored. These are produced in RA when register shuffling is
    * required, and then lowered away immediately afterwards.
    */
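   /* For example (editorial illustration): a parallel copy with destinations
    * {r0.x, r0.y} and sources {r0.y, r0.x} swaps the two registers without a
    * temporary, since both sources are read before either destination is
    * written.
    */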
   OPC_META_PARALLEL_COPY = _OPC(OPC_META, 5),
   OPC_META_PHI = _OPC(OPC_META, 6),
   /*
    * A manually encoded opcode
    */
   OPC_META_RAW = _OPC(OPC_META, 7),
} opc_t;
/* clang-format on */

#define opc_cat(opc) ((int)((opc) >> NOPC_BITS))
#define opc_op(opc)  ((unsigned)((opc) & ((1 << NOPC_BITS) - 1)))
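
/* Decoding sketch (editorial example): opc_cat() and opc_op() undo the
 * _OPC() packing above, e.g.
 *
 *    opc_cat(OPC_CMPS_F) == 2
 *    opc_op(OPC_CMPS_F)  == 5
 */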

const char *disasm_a3xx_instr_name(opc_t opc);

typedef enum {
   TYPE_F16 = 0,
   TYPE_F32 = 1,
   TYPE_U16 = 2,
   TYPE_U32 = 3,
   TYPE_S16 = 4,
   TYPE_S32 = 5,
   TYPE_ATOMIC_U64 = 6, /* Only valid for a7xx atomics */
   TYPE_U8 = 6,
   TYPE_U8_32 = 7,
} type_t;

static inline uint32_t
type_size(type_t type)
{
   switch (type) {
   case TYPE_F32:
   case TYPE_U32:
   case TYPE_U8_32:
   case TYPE_S32:
      return 32;
   case TYPE_F16:
   case TYPE_U16:
   case TYPE_S16:
      return 16;
   case TYPE_U8:
      return 8;
   default:
      ir3_assert(0); /* invalid type */
      return 0;
   }
}

static inline type_t
type_uint_size(unsigned bit_size)
{
   switch (bit_size) {
   case 8:  return TYPE_U8;
   case 1:  /* 1b bools are treated as normal half-regs */
   case 16: return TYPE_U16;
   case 32: return TYPE_U32;
   case 64:
      return TYPE_U32;
   default:
      ir3_assert(0); /* invalid size */
      return (type_t)0;
   }
}

static inline type_t
type_float_size(unsigned bit_size)
{
   switch (bit_size) {
   case 16: return TYPE_F16;
   case 32: return TYPE_F32;
   default:
      ir3_assert(0); /* invalid size */
      return (type_t)0;
   }
}

static inline int
type_float(type_t type)
{
   return (type == TYPE_F32) || (type == TYPE_F16);
}

static inline int
type_uint(type_t type)
{
   return (type == TYPE_U32) || (type == TYPE_U16) || (type == TYPE_U8) ||
          (type == TYPE_U8_32);
}

static inline int
type_sint(type_t type)
{
   return (type == TYPE_S32) || (type == TYPE_S16);
}

typedef enum {
   ROUND_ZERO = 0,
   ROUND_EVEN = 1,
   ROUND_POS_INF = 2,
   ROUND_NEG_INF = 3,
} round_t;

/* comp:
 *   0 - x
 *   1 - y
 *   2 - z
 *   3 - w
 */
static inline uint32_t
regid(int num, int comp)
{
   return (num << 2) | (comp & 0x3);
}
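
/* Encoding sketch (editorial example): regid() keeps the register number in
 * the upper bits and the component in the low two bits, e.g.
 *
 *    regid(2, 3) == (2 << 2) | 3 == 11    -> r2.w
 *    regid(REG_P0, 0)                     -> p0.x (see REG_P0_X below)
 */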

#define INVALID_REG     regid(63, 0)
#define VALIDREG(r)     ((r) != INVALID_REG)
#define CONDREG(r, val) COND(VALIDREG(r), (val))

/* special registers: */
#define REG_A0 61 /* address register */
#define REG_P0 62 /* predicate register */
#define REG_P0_X regid(REG_P0, 0) /* p0.x */

#define INVALID_CONST_REG UINT16_MAX

/* With is_bindless_s2en = 1, this determines whether bindless is enabled and
 * if so, how to get the (base, index) pair for both sampler and texture.
 * There is a single base embedded in the instruction, which is always used
 * for the texture.
 */
typedef enum {
   /* Use traditional GL binding model, get texture and sampler index from src3
    * which is presumed to be uniform on a4xx+ (a3xx doesn't have the other
    * modes, but does handle non-uniform indexing).
    */
   CAT5_UNIFORM = 0,

   /* The sampler base comes from the low 3 bits of a1.x, and the sampler
    * and texture index come from src3 which is presumed to be uniform.
    */
   CAT5_BINDLESS_A1_UNIFORM = 1,

   /* The texture and sampler share the same base, and the sampler and
    * texture index come from src3 which is *not* presumed to be uniform.
    */
   CAT5_BINDLESS_NONUNIFORM = 2,

   /* The sampler base comes from the low 3 bits of a1.x, and the sampler
    * and texture index come from src3 which is *not* presumed to be
    * uniform.
    */
   CAT5_BINDLESS_A1_NONUNIFORM = 3,

   /* Use traditional GL binding model, get texture and sampler index
    * from src3 which is *not* presumed to be uniform.
    */
   CAT5_NONUNIFORM = 4,

   /* The texture and sampler share the same base, and the sampler and
    * texture index come from src3 which is presumed to be uniform.
    */
   CAT5_BINDLESS_UNIFORM = 5,

   /* The texture and sampler share the same base, get sampler index from low
    * 4 bits of src3 and texture index from high 4 bits.
    */
   CAT5_BINDLESS_IMM = 6,

   /* The sampler base comes from the low 3 bits of a1.x, and the texture
    * index comes from the next 8 bits of a1.x. The sampler index is an
    * immediate in src3.
    */
   CAT5_BINDLESS_A1_IMM = 7,
} cat5_desc_mode_t;

/* Similar to cat5_desc_mode_t, describes how the descriptor is loaded.
 */
typedef enum {
   /* Use old GL binding model with an immediate index. */
   CAT6_IMM = 0,

   CAT6_UNIFORM = 1,

   CAT6_NONUNIFORM = 2,

   /* Use the bindless model, with an immediate index.
    */
   CAT6_BINDLESS_IMM = 4,

   /* Use the bindless model, with a uniform register index.
    */
   CAT6_BINDLESS_UNIFORM = 5,

   /* Use the bindless model, with a register index that isn't guaranteed
    * to be uniform. This presumably checks if the indices are equal and
    * splits up the load/store, because it works the way you would
    * expect.
    */
   CAT6_BINDLESS_NONUNIFORM = 6,
} cat6_desc_mode_t;

static inline bool
is_sat_compatible(opc_t opc)
{
   /* On a6xx saturation doesn't work on cat4 */
   if (opc_cat(opc) != 2 && opc_cat(opc) != 3)
      return false;

   switch (opc) {
   /* On a3xx and a6xx saturation doesn't work on bary.f/flat.b */
   case OPC_BARY_F:
   case OPC_FLAT_B:
   /* On a6xx saturation doesn't work on sel.* */
   case OPC_SEL_B16:
   case OPC_SEL_B32:
   case OPC_SEL_S16:
   case OPC_SEL_S32:
   case OPC_SEL_F16:
   case OPC_SEL_F32:
      return false;
   default:
      return true;
   }
}

static inline bool
is_mad(opc_t opc)
{
   switch (opc) {
   case OPC_MAD_U16:
   case OPC_MAD_S16:
   case OPC_MAD_U24:
   case OPC_MAD_S24:
   case OPC_MAD_F16:
   case OPC_MAD_F32:
      return true;
   default:
      return false;
   }
}

static inline bool
is_madsh(opc_t opc)
{
   switch (opc) {
   case OPC_MADSH_U16:
   case OPC_MADSH_M16:
      return true;
   default:
      return false;
   }
}

static inline bool
is_sad(opc_t opc)
{
   switch (opc) {
   case OPC_SAD_S16:
   case OPC_SAD_S32:
      return true;
   default:
      return false;
   }
}

static inline bool
is_local_atomic(opc_t opc)
{
   switch (opc) {
   case OPC_ATOMIC_ADD:
   case OPC_ATOMIC_SUB:
   case OPC_ATOMIC_XCHG:
   case OPC_ATOMIC_INC:
   case OPC_ATOMIC_DEC:
   case OPC_ATOMIC_CMPXCHG:
   case OPC_ATOMIC_MIN:
   case OPC_ATOMIC_MAX:
   case OPC_ATOMIC_AND:
   case OPC_ATOMIC_OR:
   case OPC_ATOMIC_XOR:
      return true;
   default:
      return false;
   }
}

static inline bool
is_global_a3xx_atomic(opc_t opc)
{
   switch (opc) {
   case OPC_ATOMIC_S_ADD:
   case OPC_ATOMIC_S_SUB:
   case OPC_ATOMIC_S_XCHG:
   case OPC_ATOMIC_S_INC:
   case OPC_ATOMIC_S_DEC:
   case OPC_ATOMIC_S_CMPXCHG:
   case OPC_ATOMIC_S_MIN:
   case OPC_ATOMIC_S_MAX:
   case OPC_ATOMIC_S_AND:
   case OPC_ATOMIC_S_OR:
   case OPC_ATOMIC_S_XOR:
      return true;
   default:
      return false;
   }
}

static inline bool
is_global_a6xx_atomic(opc_t opc)
{
   switch (opc) {
   case OPC_ATOMIC_G_ADD:
   case OPC_ATOMIC_G_SUB:
   case OPC_ATOMIC_G_XCHG:
   case OPC_ATOMIC_G_INC:
   case OPC_ATOMIC_G_DEC:
   case OPC_ATOMIC_G_CMPXCHG:
   case OPC_ATOMIC_G_MIN:
   case OPC_ATOMIC_G_MAX:
   case OPC_ATOMIC_G_AND:
   case OPC_ATOMIC_G_OR:
   case OPC_ATOMIC_G_XOR:
      return true;
   default:
      return false;
   }
}

static inline bool
is_bindless_atomic(opc_t opc)
{
   switch (opc) {
   case OPC_ATOMIC_B_ADD:
   case OPC_ATOMIC_B_SUB:
   case OPC_ATOMIC_B_XCHG:
   case OPC_ATOMIC_B_INC:
   case OPC_ATOMIC_B_DEC:
   case OPC_ATOMIC_B_CMPXCHG:
   case OPC_ATOMIC_B_MIN:
   case OPC_ATOMIC_B_MAX:
   case OPC_ATOMIC_B_AND:
   case OPC_ATOMIC_B_OR:
   case OPC_ATOMIC_B_XOR:
      return true;
   default:
      return false;
   }
}

static inline bool
is_atomic(opc_t opc)
{
   return is_local_atomic(opc) || is_global_a3xx_atomic(opc) ||
          is_global_a6xx_atomic(opc) || is_bindless_atomic(opc);
}

static inline bool
is_ssbo(opc_t opc)
{
   switch (opc) {
   case OPC_RESFMT:
   case OPC_RESINFO:
   case OPC_LDGB:
   case OPC_STGB:
   case OPC_STIB:
      return true;
   default:
      return false;
   }
}

static inline bool
is_isam(opc_t opc)
{
   switch (opc) {
   case OPC_ISAM:
   case OPC_ISAML:
   case OPC_ISAMM:
      return true;
   default:
      return false;
   }
}

static inline bool
is_cat2_float(opc_t opc)
{
   switch (opc) {
   case OPC_ADD_F:
   case OPC_MIN_F:
   case OPC_MAX_F:
   case OPC_MUL_F:
   case OPC_SIGN_F:
   case OPC_CMPS_F:
   case OPC_ABSNEG_F:
   case OPC_CMPV_F:
   case OPC_FLOOR_F:
   case OPC_CEIL_F:
   case OPC_RNDNE_F:
   case OPC_RNDAZ_F:
   case OPC_TRUNC_F:
      return true;

   default:
      return false;
   }
}

static inline bool
is_cat3_float(opc_t opc)
{
   switch (opc) {
   case OPC_MAD_F16:
   case OPC_MAD_F32:
   case OPC_SEL_F16:
   case OPC_SEL_F32:
      return true;
   default:
      return false;
   }
}

static inline bool
is_cat3_alt(opc_t opc)
{
   switch (opc) {
   case OPC_SHLM:
   case OPC_SHRM:
   case OPC_SHLG:
   case OPC_SHRG:
   case OPC_ANDG:
      return true;
   default:
      return false;
   }
}

#endif /* INSTR_A3XX_H_ */