1 /* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
2 /* TILE-Gx opcode information.
3 *
4 * Copyright 2011 Tilera Corporation. All Rights Reserved.
5 *
6 * This program is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU General Public License
8 * as published by the Free Software Foundation, version 2.
9 *
10 * This program is distributed in the hope that it will be useful, but
11 * WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
13 * NON INFRINGEMENT. See the GNU General Public License for
14 * more details.
15 *
16 *
17 *
18 *
19 *
20 */
21
22 #ifndef __ARCH_OPCODE_H__
23 #define __ARCH_OPCODE_H__
24
25 #ifndef __ASSEMBLER__
26
/* A TILE-Gx instruction bundle is a single 64-bit word. */
typedef unsigned long long tilegx_bundle_bits;

/* These are the bits that determine if a bundle is in the X encoding. */
#define TILEGX_BUNDLE_MODE_MASK ((tilegx_bundle_bits)3 << 62)
31
/* Basic geometry of the TILE-Gx bundle encoding. */
enum
{
  /* Upper bound on instructions per bundle (2 for X, 3 for Y). */
  TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE = 3,

  /* Count of distinct pipeline encodings: X0, X1, Y0, Y1, Y2. */
  TILEGX_NUM_PIPELINE_ENCODINGS = 5,

  /* log2 of TILEGX_BUNDLE_SIZE_IN_BYTES. */
  TILEGX_LOG2_BUNDLE_SIZE_IN_BYTES = 3,

  /* Every bundle occupies exactly this many bytes. */
  TILEGX_BUNDLE_SIZE_IN_BYTES = 1 << TILEGX_LOG2_BUNDLE_SIZE_IN_BYTES,

  /* log2 of TILEGX_BUNDLE_ALIGNMENT_IN_BYTES. */
  TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES = 3,

  /* Required alignment of a bundle, in bytes. */
  TILEGX_BUNDLE_ALIGNMENT_IN_BYTES =
    (1 << TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES),

  /* Number of registers (some are magic, such as network I/O). */
  TILEGX_NUM_REGISTERS = 64,
};
56
/* Make a few "tile_" variables to simplify common code between
   architectures. */

typedef tilegx_bundle_bits tile_bundle_bits;
#define TILE_BUNDLE_SIZE_IN_BYTES TILEGX_BUNDLE_SIZE_IN_BYTES
#define TILE_BUNDLE_ALIGNMENT_IN_BYTES TILEGX_BUNDLE_ALIGNMENT_IN_BYTES
#define TILE_LOG2_BUNDLE_ALIGNMENT_IN_BYTES \
  TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES
/* NOTE: expands to TILEGX_BPT_BUNDLE, which is #defined just below;
   that is fine because macro expansion happens at the point of use. */
#define TILE_BPT_BUNDLE TILEGX_BPT_BUNDLE

/* 64-bit pattern for a { bpt ; nop } bundle. */
#define TILEGX_BPT_BUNDLE 0x286a44ae51485000ULL
69
70 static __inline unsigned int
get_BFEnd_X0(tilegx_bundle_bits num)71 get_BFEnd_X0(tilegx_bundle_bits num)
72 {
73 const unsigned int n = (unsigned int)num;
74 return (((n >> 12)) & 0x3f);
75 }
76
77 static __inline unsigned int
get_BFOpcodeExtension_X0(tilegx_bundle_bits num)78 get_BFOpcodeExtension_X0(tilegx_bundle_bits num)
79 {
80 const unsigned int n = (unsigned int)num;
81 return (((n >> 24)) & 0xf);
82 }
83
84 static __inline unsigned int
get_BFStart_X0(tilegx_bundle_bits num)85 get_BFStart_X0(tilegx_bundle_bits num)
86 {
87 const unsigned int n = (unsigned int)num;
88 return (((n >> 18)) & 0x3f);
89 }
90
91 static __inline unsigned int
get_BrOff_X1(tilegx_bundle_bits n)92 get_BrOff_X1(tilegx_bundle_bits n)
93 {
94 return (((unsigned int)(n >> 31)) & 0x0000003f) |
95 (((unsigned int)(n >> 37)) & 0x0001ffc0);
96 }
97
98 static __inline unsigned int
get_BrType_X1(tilegx_bundle_bits n)99 get_BrType_X1(tilegx_bundle_bits n)
100 {
101 return (((unsigned int)(n >> 54)) & 0x1f);
102 }
103
104 static __inline unsigned int
get_Dest_Imm8_X1(tilegx_bundle_bits n)105 get_Dest_Imm8_X1(tilegx_bundle_bits n)
106 {
107 return (((unsigned int)(n >> 31)) & 0x0000003f) |
108 (((unsigned int)(n >> 43)) & 0x000000c0);
109 }
110
111 static __inline unsigned int
get_Dest_X0(tilegx_bundle_bits num)112 get_Dest_X0(tilegx_bundle_bits num)
113 {
114 const unsigned int n = (unsigned int)num;
115 return (((n >> 0)) & 0x3f);
116 }
117
118 static __inline unsigned int
get_Dest_X1(tilegx_bundle_bits n)119 get_Dest_X1(tilegx_bundle_bits n)
120 {
121 return (((unsigned int)(n >> 31)) & 0x3f);
122 }
123
124 static __inline unsigned int
get_Dest_Y0(tilegx_bundle_bits num)125 get_Dest_Y0(tilegx_bundle_bits num)
126 {
127 const unsigned int n = (unsigned int)num;
128 return (((n >> 0)) & 0x3f);
129 }
130
131 static __inline unsigned int
get_Dest_Y1(tilegx_bundle_bits n)132 get_Dest_Y1(tilegx_bundle_bits n)
133 {
134 return (((unsigned int)(n >> 31)) & 0x3f);
135 }
136
137 static __inline unsigned int
get_Imm16_X0(tilegx_bundle_bits num)138 get_Imm16_X0(tilegx_bundle_bits num)
139 {
140 const unsigned int n = (unsigned int)num;
141 return (((n >> 12)) & 0xffff);
142 }
143
144 static __inline unsigned int
get_Imm16_X1(tilegx_bundle_bits n)145 get_Imm16_X1(tilegx_bundle_bits n)
146 {
147 return (((unsigned int)(n >> 43)) & 0xffff);
148 }
149
150 static __inline unsigned int
get_Imm8OpcodeExtension_X0(tilegx_bundle_bits num)151 get_Imm8OpcodeExtension_X0(tilegx_bundle_bits num)
152 {
153 const unsigned int n = (unsigned int)num;
154 return (((n >> 20)) & 0xff);
155 }
156
157 static __inline unsigned int
get_Imm8OpcodeExtension_X1(tilegx_bundle_bits n)158 get_Imm8OpcodeExtension_X1(tilegx_bundle_bits n)
159 {
160 return (((unsigned int)(n >> 51)) & 0xff);
161 }
162
163 static __inline unsigned int
get_Imm8_X0(tilegx_bundle_bits num)164 get_Imm8_X0(tilegx_bundle_bits num)
165 {
166 const unsigned int n = (unsigned int)num;
167 return (((n >> 12)) & 0xff);
168 }
169
170 static __inline unsigned int
get_Imm8_X1(tilegx_bundle_bits n)171 get_Imm8_X1(tilegx_bundle_bits n)
172 {
173 return (((unsigned int)(n >> 43)) & 0xff);
174 }
175
176 static __inline unsigned int
get_Imm8_Y0(tilegx_bundle_bits num)177 get_Imm8_Y0(tilegx_bundle_bits num)
178 {
179 const unsigned int n = (unsigned int)num;
180 return (((n >> 12)) & 0xff);
181 }
182
183 static __inline unsigned int
get_Imm8_Y1(tilegx_bundle_bits n)184 get_Imm8_Y1(tilegx_bundle_bits n)
185 {
186 return (((unsigned int)(n >> 43)) & 0xff);
187 }
188
189 static __inline unsigned int
get_JumpOff_X1(tilegx_bundle_bits n)190 get_JumpOff_X1(tilegx_bundle_bits n)
191 {
192 return (((unsigned int)(n >> 31)) & 0x7ffffff);
193 }
194
195 static __inline unsigned int
get_JumpOpcodeExtension_X1(tilegx_bundle_bits n)196 get_JumpOpcodeExtension_X1(tilegx_bundle_bits n)
197 {
198 return (((unsigned int)(n >> 58)) & 0x1);
199 }
200
201 static __inline unsigned int
get_MF_Imm14_X1(tilegx_bundle_bits n)202 get_MF_Imm14_X1(tilegx_bundle_bits n)
203 {
204 return (((unsigned int)(n >> 37)) & 0x3fff);
205 }
206
207 static __inline unsigned int
get_MT_Imm14_X1(tilegx_bundle_bits n)208 get_MT_Imm14_X1(tilegx_bundle_bits n)
209 {
210 return (((unsigned int)(n >> 31)) & 0x0000003f) |
211 (((unsigned int)(n >> 37)) & 0x00003fc0);
212 }
213
214 static __inline unsigned int
get_Mode(tilegx_bundle_bits n)215 get_Mode(tilegx_bundle_bits n)
216 {
217 return (((unsigned int)(n >> 62)) & 0x3);
218 }
219
220 static __inline unsigned int
get_Opcode_X0(tilegx_bundle_bits num)221 get_Opcode_X0(tilegx_bundle_bits num)
222 {
223 const unsigned int n = (unsigned int)num;
224 return (((n >> 28)) & 0x7);
225 }
226
227 static __inline unsigned int
get_Opcode_X1(tilegx_bundle_bits n)228 get_Opcode_X1(tilegx_bundle_bits n)
229 {
230 return (((unsigned int)(n >> 59)) & 0x7);
231 }
232
233 static __inline unsigned int
get_Opcode_Y0(tilegx_bundle_bits num)234 get_Opcode_Y0(tilegx_bundle_bits num)
235 {
236 const unsigned int n = (unsigned int)num;
237 return (((n >> 27)) & 0xf);
238 }
239
240 static __inline unsigned int
get_Opcode_Y1(tilegx_bundle_bits n)241 get_Opcode_Y1(tilegx_bundle_bits n)
242 {
243 return (((unsigned int)(n >> 58)) & 0xf);
244 }
245
246 static __inline unsigned int
get_Opcode_Y2(tilegx_bundle_bits n)247 get_Opcode_Y2(tilegx_bundle_bits n)
248 {
249 return (((n >> 26)) & 0x00000001) |
250 (((unsigned int)(n >> 56)) & 0x00000002);
251 }
252
253 static __inline unsigned int
get_RRROpcodeExtension_X0(tilegx_bundle_bits num)254 get_RRROpcodeExtension_X0(tilegx_bundle_bits num)
255 {
256 const unsigned int n = (unsigned int)num;
257 return (((n >> 18)) & 0x3ff);
258 }
259
260 static __inline unsigned int
get_RRROpcodeExtension_X1(tilegx_bundle_bits n)261 get_RRROpcodeExtension_X1(tilegx_bundle_bits n)
262 {
263 return (((unsigned int)(n >> 49)) & 0x3ff);
264 }
265
266 static __inline unsigned int
get_RRROpcodeExtension_Y0(tilegx_bundle_bits num)267 get_RRROpcodeExtension_Y0(tilegx_bundle_bits num)
268 {
269 const unsigned int n = (unsigned int)num;
270 return (((n >> 18)) & 0x3);
271 }
272
273 static __inline unsigned int
get_RRROpcodeExtension_Y1(tilegx_bundle_bits n)274 get_RRROpcodeExtension_Y1(tilegx_bundle_bits n)
275 {
276 return (((unsigned int)(n >> 49)) & 0x3);
277 }
278
279 static __inline unsigned int
get_ShAmt_X0(tilegx_bundle_bits num)280 get_ShAmt_X0(tilegx_bundle_bits num)
281 {
282 const unsigned int n = (unsigned int)num;
283 return (((n >> 12)) & 0x3f);
284 }
285
286 static __inline unsigned int
get_ShAmt_X1(tilegx_bundle_bits n)287 get_ShAmt_X1(tilegx_bundle_bits n)
288 {
289 return (((unsigned int)(n >> 43)) & 0x3f);
290 }
291
292 static __inline unsigned int
get_ShAmt_Y0(tilegx_bundle_bits num)293 get_ShAmt_Y0(tilegx_bundle_bits num)
294 {
295 const unsigned int n = (unsigned int)num;
296 return (((n >> 12)) & 0x3f);
297 }
298
299 static __inline unsigned int
get_ShAmt_Y1(tilegx_bundle_bits n)300 get_ShAmt_Y1(tilegx_bundle_bits n)
301 {
302 return (((unsigned int)(n >> 43)) & 0x3f);
303 }
304
305 static __inline unsigned int
get_ShiftOpcodeExtension_X0(tilegx_bundle_bits num)306 get_ShiftOpcodeExtension_X0(tilegx_bundle_bits num)
307 {
308 const unsigned int n = (unsigned int)num;
309 return (((n >> 18)) & 0x3ff);
310 }
311
312 static __inline unsigned int
get_ShiftOpcodeExtension_X1(tilegx_bundle_bits n)313 get_ShiftOpcodeExtension_X1(tilegx_bundle_bits n)
314 {
315 return (((unsigned int)(n >> 49)) & 0x3ff);
316 }
317
318 static __inline unsigned int
get_ShiftOpcodeExtension_Y0(tilegx_bundle_bits num)319 get_ShiftOpcodeExtension_Y0(tilegx_bundle_bits num)
320 {
321 const unsigned int n = (unsigned int)num;
322 return (((n >> 18)) & 0x3);
323 }
324
325 static __inline unsigned int
get_ShiftOpcodeExtension_Y1(tilegx_bundle_bits n)326 get_ShiftOpcodeExtension_Y1(tilegx_bundle_bits n)
327 {
328 return (((unsigned int)(n >> 49)) & 0x3);
329 }
330
331 static __inline unsigned int
get_SrcA_X0(tilegx_bundle_bits num)332 get_SrcA_X0(tilegx_bundle_bits num)
333 {
334 const unsigned int n = (unsigned int)num;
335 return (((n >> 6)) & 0x3f);
336 }
337
338 static __inline unsigned int
get_SrcA_X1(tilegx_bundle_bits n)339 get_SrcA_X1(tilegx_bundle_bits n)
340 {
341 return (((unsigned int)(n >> 37)) & 0x3f);
342 }
343
344 static __inline unsigned int
get_SrcA_Y0(tilegx_bundle_bits num)345 get_SrcA_Y0(tilegx_bundle_bits num)
346 {
347 const unsigned int n = (unsigned int)num;
348 return (((n >> 6)) & 0x3f);
349 }
350
351 static __inline unsigned int
get_SrcA_Y1(tilegx_bundle_bits n)352 get_SrcA_Y1(tilegx_bundle_bits n)
353 {
354 return (((unsigned int)(n >> 37)) & 0x3f);
355 }
356
357 static __inline unsigned int
get_SrcA_Y2(tilegx_bundle_bits num)358 get_SrcA_Y2(tilegx_bundle_bits num)
359 {
360 const unsigned int n = (unsigned int)num;
361 return (((n >> 20)) & 0x3f);
362 }
363
364 static __inline unsigned int
get_SrcBDest_Y2(tilegx_bundle_bits n)365 get_SrcBDest_Y2(tilegx_bundle_bits n)
366 {
367 return (((unsigned int)(n >> 51)) & 0x3f);
368 }
369
370 static __inline unsigned int
get_SrcB_X0(tilegx_bundle_bits num)371 get_SrcB_X0(tilegx_bundle_bits num)
372 {
373 const unsigned int n = (unsigned int)num;
374 return (((n >> 12)) & 0x3f);
375 }
376
377 static __inline unsigned int
get_SrcB_X1(tilegx_bundle_bits n)378 get_SrcB_X1(tilegx_bundle_bits n)
379 {
380 return (((unsigned int)(n >> 43)) & 0x3f);
381 }
382
383 static __inline unsigned int
get_SrcB_Y0(tilegx_bundle_bits num)384 get_SrcB_Y0(tilegx_bundle_bits num)
385 {
386 const unsigned int n = (unsigned int)num;
387 return (((n >> 12)) & 0x3f);
388 }
389
390 static __inline unsigned int
get_SrcB_Y1(tilegx_bundle_bits n)391 get_SrcB_Y1(tilegx_bundle_bits n)
392 {
393 return (((unsigned int)(n >> 43)) & 0x3f);
394 }
395
396 static __inline unsigned int
get_UnaryOpcodeExtension_X0(tilegx_bundle_bits num)397 get_UnaryOpcodeExtension_X0(tilegx_bundle_bits num)
398 {
399 const unsigned int n = (unsigned int)num;
400 return (((n >> 12)) & 0x3f);
401 }
402
403 static __inline unsigned int
get_UnaryOpcodeExtension_X1(tilegx_bundle_bits n)404 get_UnaryOpcodeExtension_X1(tilegx_bundle_bits n)
405 {
406 return (((unsigned int)(n >> 43)) & 0x3f);
407 }
408
409 static __inline unsigned int
get_UnaryOpcodeExtension_Y0(tilegx_bundle_bits num)410 get_UnaryOpcodeExtension_Y0(tilegx_bundle_bits num)
411 {
412 const unsigned int n = (unsigned int)num;
413 return (((n >> 12)) & 0x3f);
414 }
415
416 static __inline unsigned int
get_UnaryOpcodeExtension_Y1(tilegx_bundle_bits n)417 get_UnaryOpcodeExtension_Y1(tilegx_bundle_bits n)
418 {
419 return (((unsigned int)(n >> 43)) & 0x3f);
420 }
421
422
/*
 * Sign-extend the low num_bits bits of n to a full int.
 *
 * The previous implementation did "(n << shift) >> shift", which is
 * undefined behavior when the intermediate left shift sees a negative
 * value (C11 6.5.7p4), and implementation-defined on the arithmetic
 * right shift back down.  Use the fully defined unsigned xor/subtract
 * idiom instead; it produces the same result for every input the old
 * code handled.
 */
static __inline int
sign_extend(int n, int num_bits)
{
  unsigned int val = (unsigned int)n;
  unsigned int sign_bit;

  /* Field as wide as (or wider than) int: nothing to extend.  This
     matches the old shift == 0 case for num_bits == 32. */
  if (num_bits <= 0 || num_bits >= (int)(sizeof(int) * 8))
    return n;

  sign_bit = 1u << (num_bits - 1);
  val &= (sign_bit << 1) - 1;          /* keep only the low num_bits bits */

  /* If the field's sign bit is set, this propagates it upward. */
  return (int)((val ^ sign_bit) - sign_bit);
}
429
430
431
432 static __inline tilegx_bundle_bits
create_BFEnd_X0(int num)433 create_BFEnd_X0(int num)
434 {
435 const unsigned int n = (unsigned int)num;
436 return ((n & 0x3f) << 12);
437 }
438
439 static __inline tilegx_bundle_bits
create_BFOpcodeExtension_X0(int num)440 create_BFOpcodeExtension_X0(int num)
441 {
442 const unsigned int n = (unsigned int)num;
443 return ((n & 0xf) << 24);
444 }
445
446 static __inline tilegx_bundle_bits
create_BFStart_X0(int num)447 create_BFStart_X0(int num)
448 {
449 const unsigned int n = (unsigned int)num;
450 return ((n & 0x3f) << 18);
451 }
452
453 static __inline tilegx_bundle_bits
create_BrOff_X1(int num)454 create_BrOff_X1(int num)
455 {
456 const unsigned int n = (unsigned int)num;
457 return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
458 (((tilegx_bundle_bits)(n & 0x0001ffc0)) << 37);
459 }
460
461 static __inline tilegx_bundle_bits
create_BrType_X1(int num)462 create_BrType_X1(int num)
463 {
464 const unsigned int n = (unsigned int)num;
465 return (((tilegx_bundle_bits)(n & 0x1f)) << 54);
466 }
467
468 static __inline tilegx_bundle_bits
create_Dest_Imm8_X1(int num)469 create_Dest_Imm8_X1(int num)
470 {
471 const unsigned int n = (unsigned int)num;
472 return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
473 (((tilegx_bundle_bits)(n & 0x000000c0)) << 43);
474 }
475
476 static __inline tilegx_bundle_bits
create_Dest_X0(int num)477 create_Dest_X0(int num)
478 {
479 const unsigned int n = (unsigned int)num;
480 return ((n & 0x3f) << 0);
481 }
482
483 static __inline tilegx_bundle_bits
create_Dest_X1(int num)484 create_Dest_X1(int num)
485 {
486 const unsigned int n = (unsigned int)num;
487 return (((tilegx_bundle_bits)(n & 0x3f)) << 31);
488 }
489
490 static __inline tilegx_bundle_bits
create_Dest_Y0(int num)491 create_Dest_Y0(int num)
492 {
493 const unsigned int n = (unsigned int)num;
494 return ((n & 0x3f) << 0);
495 }
496
497 static __inline tilegx_bundle_bits
create_Dest_Y1(int num)498 create_Dest_Y1(int num)
499 {
500 const unsigned int n = (unsigned int)num;
501 return (((tilegx_bundle_bits)(n & 0x3f)) << 31);
502 }
503
504 static __inline tilegx_bundle_bits
create_Imm16_X0(int num)505 create_Imm16_X0(int num)
506 {
507 const unsigned int n = (unsigned int)num;
508 return ((n & 0xffff) << 12);
509 }
510
511 static __inline tilegx_bundle_bits
create_Imm16_X1(int num)512 create_Imm16_X1(int num)
513 {
514 const unsigned int n = (unsigned int)num;
515 return (((tilegx_bundle_bits)(n & 0xffff)) << 43);
516 }
517
518 static __inline tilegx_bundle_bits
create_Imm8OpcodeExtension_X0(int num)519 create_Imm8OpcodeExtension_X0(int num)
520 {
521 const unsigned int n = (unsigned int)num;
522 return ((n & 0xff) << 20);
523 }
524
525 static __inline tilegx_bundle_bits
create_Imm8OpcodeExtension_X1(int num)526 create_Imm8OpcodeExtension_X1(int num)
527 {
528 const unsigned int n = (unsigned int)num;
529 return (((tilegx_bundle_bits)(n & 0xff)) << 51);
530 }
531
532 static __inline tilegx_bundle_bits
create_Imm8_X0(int num)533 create_Imm8_X0(int num)
534 {
535 const unsigned int n = (unsigned int)num;
536 return ((n & 0xff) << 12);
537 }
538
539 static __inline tilegx_bundle_bits
create_Imm8_X1(int num)540 create_Imm8_X1(int num)
541 {
542 const unsigned int n = (unsigned int)num;
543 return (((tilegx_bundle_bits)(n & 0xff)) << 43);
544 }
545
546 static __inline tilegx_bundle_bits
create_Imm8_Y0(int num)547 create_Imm8_Y0(int num)
548 {
549 const unsigned int n = (unsigned int)num;
550 return ((n & 0xff) << 12);
551 }
552
553 static __inline tilegx_bundle_bits
create_Imm8_Y1(int num)554 create_Imm8_Y1(int num)
555 {
556 const unsigned int n = (unsigned int)num;
557 return (((tilegx_bundle_bits)(n & 0xff)) << 43);
558 }
559
560 static __inline tilegx_bundle_bits
create_JumpOff_X1(int num)561 create_JumpOff_X1(int num)
562 {
563 const unsigned int n = (unsigned int)num;
564 return (((tilegx_bundle_bits)(n & 0x7ffffff)) << 31);
565 }
566
567 static __inline tilegx_bundle_bits
create_JumpOpcodeExtension_X1(int num)568 create_JumpOpcodeExtension_X1(int num)
569 {
570 const unsigned int n = (unsigned int)num;
571 return (((tilegx_bundle_bits)(n & 0x1)) << 58);
572 }
573
574 static __inline tilegx_bundle_bits
create_MF_Imm14_X1(int num)575 create_MF_Imm14_X1(int num)
576 {
577 const unsigned int n = (unsigned int)num;
578 return (((tilegx_bundle_bits)(n & 0x3fff)) << 37);
579 }
580
581 static __inline tilegx_bundle_bits
create_MT_Imm14_X1(int num)582 create_MT_Imm14_X1(int num)
583 {
584 const unsigned int n = (unsigned int)num;
585 return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
586 (((tilegx_bundle_bits)(n & 0x00003fc0)) << 37);
587 }
588
589 static __inline tilegx_bundle_bits
create_Mode(int num)590 create_Mode(int num)
591 {
592 const unsigned int n = (unsigned int)num;
593 return (((tilegx_bundle_bits)(n & 0x3)) << 62);
594 }
595
596 static __inline tilegx_bundle_bits
create_Opcode_X0(int num)597 create_Opcode_X0(int num)
598 {
599 const unsigned int n = (unsigned int)num;
600 return ((n & 0x7) << 28);
601 }
602
603 static __inline tilegx_bundle_bits
create_Opcode_X1(int num)604 create_Opcode_X1(int num)
605 {
606 const unsigned int n = (unsigned int)num;
607 return (((tilegx_bundle_bits)(n & 0x7)) << 59);
608 }
609
610 static __inline tilegx_bundle_bits
create_Opcode_Y0(int num)611 create_Opcode_Y0(int num)
612 {
613 const unsigned int n = (unsigned int)num;
614 return ((n & 0xf) << 27);
615 }
616
617 static __inline tilegx_bundle_bits
create_Opcode_Y1(int num)618 create_Opcode_Y1(int num)
619 {
620 const unsigned int n = (unsigned int)num;
621 return (((tilegx_bundle_bits)(n & 0xf)) << 58);
622 }
623
624 static __inline tilegx_bundle_bits
create_Opcode_Y2(int num)625 create_Opcode_Y2(int num)
626 {
627 const unsigned int n = (unsigned int)num;
628 return ((n & 0x00000001) << 26) |
629 (((tilegx_bundle_bits)(n & 0x00000002)) << 56);
630 }
631
632 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_X0(int num)633 create_RRROpcodeExtension_X0(int num)
634 {
635 const unsigned int n = (unsigned int)num;
636 return ((n & 0x3ff) << 18);
637 }
638
639 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_X1(int num)640 create_RRROpcodeExtension_X1(int num)
641 {
642 const unsigned int n = (unsigned int)num;
643 return (((tilegx_bundle_bits)(n & 0x3ff)) << 49);
644 }
645
646 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_Y0(int num)647 create_RRROpcodeExtension_Y0(int num)
648 {
649 const unsigned int n = (unsigned int)num;
650 return ((n & 0x3) << 18);
651 }
652
653 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_Y1(int num)654 create_RRROpcodeExtension_Y1(int num)
655 {
656 const unsigned int n = (unsigned int)num;
657 return (((tilegx_bundle_bits)(n & 0x3)) << 49);
658 }
659
660 static __inline tilegx_bundle_bits
create_ShAmt_X0(int num)661 create_ShAmt_X0(int num)
662 {
663 const unsigned int n = (unsigned int)num;
664 return ((n & 0x3f) << 12);
665 }
666
667 static __inline tilegx_bundle_bits
create_ShAmt_X1(int num)668 create_ShAmt_X1(int num)
669 {
670 const unsigned int n = (unsigned int)num;
671 return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
672 }
673
674 static __inline tilegx_bundle_bits
create_ShAmt_Y0(int num)675 create_ShAmt_Y0(int num)
676 {
677 const unsigned int n = (unsigned int)num;
678 return ((n & 0x3f) << 12);
679 }
680
681 static __inline tilegx_bundle_bits
create_ShAmt_Y1(int num)682 create_ShAmt_Y1(int num)
683 {
684 const unsigned int n = (unsigned int)num;
685 return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
686 }
687
688 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_X0(int num)689 create_ShiftOpcodeExtension_X0(int num)
690 {
691 const unsigned int n = (unsigned int)num;
692 return ((n & 0x3ff) << 18);
693 }
694
695 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_X1(int num)696 create_ShiftOpcodeExtension_X1(int num)
697 {
698 const unsigned int n = (unsigned int)num;
699 return (((tilegx_bundle_bits)(n & 0x3ff)) << 49);
700 }
701
702 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_Y0(int num)703 create_ShiftOpcodeExtension_Y0(int num)
704 {
705 const unsigned int n = (unsigned int)num;
706 return ((n & 0x3) << 18);
707 }
708
709 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_Y1(int num)710 create_ShiftOpcodeExtension_Y1(int num)
711 {
712 const unsigned int n = (unsigned int)num;
713 return (((tilegx_bundle_bits)(n & 0x3)) << 49);
714 }
715
716 static __inline tilegx_bundle_bits
create_SrcA_X0(int num)717 create_SrcA_X0(int num)
718 {
719 const unsigned int n = (unsigned int)num;
720 return ((n & 0x3f) << 6);
721 }
722
723 static __inline tilegx_bundle_bits
create_SrcA_X1(int num)724 create_SrcA_X1(int num)
725 {
726 const unsigned int n = (unsigned int)num;
727 return (((tilegx_bundle_bits)(n & 0x3f)) << 37);
728 }
729
730 static __inline tilegx_bundle_bits
create_SrcA_Y0(int num)731 create_SrcA_Y0(int num)
732 {
733 const unsigned int n = (unsigned int)num;
734 return ((n & 0x3f) << 6);
735 }
736
737 static __inline tilegx_bundle_bits
create_SrcA_Y1(int num)738 create_SrcA_Y1(int num)
739 {
740 const unsigned int n = (unsigned int)num;
741 return (((tilegx_bundle_bits)(n & 0x3f)) << 37);
742 }
743
744 static __inline tilegx_bundle_bits
create_SrcA_Y2(int num)745 create_SrcA_Y2(int num)
746 {
747 const unsigned int n = (unsigned int)num;
748 return ((n & 0x3f) << 20);
749 }
750
751 static __inline tilegx_bundle_bits
create_SrcBDest_Y2(int num)752 create_SrcBDest_Y2(int num)
753 {
754 const unsigned int n = (unsigned int)num;
755 return (((tilegx_bundle_bits)(n & 0x3f)) << 51);
756 }
757
758 static __inline tilegx_bundle_bits
create_SrcB_X0(int num)759 create_SrcB_X0(int num)
760 {
761 const unsigned int n = (unsigned int)num;
762 return ((n & 0x3f) << 12);
763 }
764
765 static __inline tilegx_bundle_bits
create_SrcB_X1(int num)766 create_SrcB_X1(int num)
767 {
768 const unsigned int n = (unsigned int)num;
769 return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
770 }
771
772 static __inline tilegx_bundle_bits
create_SrcB_Y0(int num)773 create_SrcB_Y0(int num)
774 {
775 const unsigned int n = (unsigned int)num;
776 return ((n & 0x3f) << 12);
777 }
778
779 static __inline tilegx_bundle_bits
create_SrcB_Y1(int num)780 create_SrcB_Y1(int num)
781 {
782 const unsigned int n = (unsigned int)num;
783 return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
784 }
785
786 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_X0(int num)787 create_UnaryOpcodeExtension_X0(int num)
788 {
789 const unsigned int n = (unsigned int)num;
790 return ((n & 0x3f) << 12);
791 }
792
793 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_X1(int num)794 create_UnaryOpcodeExtension_X1(int num)
795 {
796 const unsigned int n = (unsigned int)num;
797 return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
798 }
799
800 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_Y0(int num)801 create_UnaryOpcodeExtension_Y0(int num)
802 {
803 const unsigned int n = (unsigned int)num;
804 return ((n & 0x3f) << 12);
805 }
806
807 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_Y1(int num)808 create_UnaryOpcodeExtension_Y1(int num)
809 {
810 const unsigned int n = (unsigned int)num;
811 return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
812 }
813
814
815 enum
816 {
817 ADDI_IMM8_OPCODE_X0 = 1,
818 ADDI_IMM8_OPCODE_X1 = 1,
819 ADDI_OPCODE_Y0 = 0,
820 ADDI_OPCODE_Y1 = 1,
821 ADDLI_OPCODE_X0 = 1,
822 ADDLI_OPCODE_X1 = 0,
823 ADDXI_IMM8_OPCODE_X0 = 2,
824 ADDXI_IMM8_OPCODE_X1 = 2,
825 ADDXI_OPCODE_Y0 = 1,
826 ADDXI_OPCODE_Y1 = 2,
827 ADDXLI_OPCODE_X0 = 2,
828 ADDXLI_OPCODE_X1 = 1,
829 ADDXSC_RRR_0_OPCODE_X0 = 1,
830 ADDXSC_RRR_0_OPCODE_X1 = 1,
831 ADDX_RRR_0_OPCODE_X0 = 2,
832 ADDX_RRR_0_OPCODE_X1 = 2,
833 ADDX_RRR_0_OPCODE_Y0 = 0,
834 ADDX_RRR_0_OPCODE_Y1 = 0,
835 ADD_RRR_0_OPCODE_X0 = 3,
836 ADD_RRR_0_OPCODE_X1 = 3,
837 ADD_RRR_0_OPCODE_Y0 = 1,
838 ADD_RRR_0_OPCODE_Y1 = 1,
839 ANDI_IMM8_OPCODE_X0 = 3,
840 ANDI_IMM8_OPCODE_X1 = 3,
841 ANDI_OPCODE_Y0 = 2,
842 ANDI_OPCODE_Y1 = 3,
843 AND_RRR_0_OPCODE_X0 = 4,
844 AND_RRR_0_OPCODE_X1 = 4,
845 AND_RRR_5_OPCODE_Y0 = 0,
846 AND_RRR_5_OPCODE_Y1 = 0,
847 BEQZT_BRANCH_OPCODE_X1 = 16,
848 BEQZ_BRANCH_OPCODE_X1 = 17,
849 BFEXTS_BF_OPCODE_X0 = 4,
850 BFEXTU_BF_OPCODE_X0 = 5,
851 BFINS_BF_OPCODE_X0 = 6,
852 BF_OPCODE_X0 = 3,
853 BGEZT_BRANCH_OPCODE_X1 = 18,
854 BGEZ_BRANCH_OPCODE_X1 = 19,
855 BGTZT_BRANCH_OPCODE_X1 = 20,
856 BGTZ_BRANCH_OPCODE_X1 = 21,
857 BLBCT_BRANCH_OPCODE_X1 = 22,
858 BLBC_BRANCH_OPCODE_X1 = 23,
859 BLBST_BRANCH_OPCODE_X1 = 24,
860 BLBS_BRANCH_OPCODE_X1 = 25,
861 BLEZT_BRANCH_OPCODE_X1 = 26,
862 BLEZ_BRANCH_OPCODE_X1 = 27,
863 BLTZT_BRANCH_OPCODE_X1 = 28,
864 BLTZ_BRANCH_OPCODE_X1 = 29,
865 BNEZT_BRANCH_OPCODE_X1 = 30,
866 BNEZ_BRANCH_OPCODE_X1 = 31,
867 BRANCH_OPCODE_X1 = 2,
868 CMOVEQZ_RRR_0_OPCODE_X0 = 5,
869 CMOVEQZ_RRR_4_OPCODE_Y0 = 0,
870 CMOVNEZ_RRR_0_OPCODE_X0 = 6,
871 CMOVNEZ_RRR_4_OPCODE_Y0 = 1,
872 CMPEQI_IMM8_OPCODE_X0 = 4,
873 CMPEQI_IMM8_OPCODE_X1 = 4,
874 CMPEQI_OPCODE_Y0 = 3,
875 CMPEQI_OPCODE_Y1 = 4,
876 CMPEQ_RRR_0_OPCODE_X0 = 7,
877 CMPEQ_RRR_0_OPCODE_X1 = 5,
878 CMPEQ_RRR_3_OPCODE_Y0 = 0,
879 CMPEQ_RRR_3_OPCODE_Y1 = 2,
880 CMPEXCH4_RRR_0_OPCODE_X1 = 6,
881 CMPEXCH_RRR_0_OPCODE_X1 = 7,
882 CMPLES_RRR_0_OPCODE_X0 = 8,
883 CMPLES_RRR_0_OPCODE_X1 = 8,
884 CMPLES_RRR_2_OPCODE_Y0 = 0,
885 CMPLES_RRR_2_OPCODE_Y1 = 0,
886 CMPLEU_RRR_0_OPCODE_X0 = 9,
887 CMPLEU_RRR_0_OPCODE_X1 = 9,
888 CMPLEU_RRR_2_OPCODE_Y0 = 1,
889 CMPLEU_RRR_2_OPCODE_Y1 = 1,
890 CMPLTSI_IMM8_OPCODE_X0 = 5,
891 CMPLTSI_IMM8_OPCODE_X1 = 5,
892 CMPLTSI_OPCODE_Y0 = 4,
893 CMPLTSI_OPCODE_Y1 = 5,
894 CMPLTS_RRR_0_OPCODE_X0 = 10,
895 CMPLTS_RRR_0_OPCODE_X1 = 10,
896 CMPLTS_RRR_2_OPCODE_Y0 = 2,
897 CMPLTS_RRR_2_OPCODE_Y1 = 2,
898 CMPLTUI_IMM8_OPCODE_X0 = 6,
899 CMPLTUI_IMM8_OPCODE_X1 = 6,
900 CMPLTU_RRR_0_OPCODE_X0 = 11,
901 CMPLTU_RRR_0_OPCODE_X1 = 11,
902 CMPLTU_RRR_2_OPCODE_Y0 = 3,
903 CMPLTU_RRR_2_OPCODE_Y1 = 3,
904 CMPNE_RRR_0_OPCODE_X0 = 12,
905 CMPNE_RRR_0_OPCODE_X1 = 12,
906 CMPNE_RRR_3_OPCODE_Y0 = 1,
907 CMPNE_RRR_3_OPCODE_Y1 = 3,
908 CMULAF_RRR_0_OPCODE_X0 = 13,
909 CMULA_RRR_0_OPCODE_X0 = 14,
910 CMULFR_RRR_0_OPCODE_X0 = 15,
911 CMULF_RRR_0_OPCODE_X0 = 16,
912 CMULHR_RRR_0_OPCODE_X0 = 17,
913 CMULH_RRR_0_OPCODE_X0 = 18,
914 CMUL_RRR_0_OPCODE_X0 = 19,
915 CNTLZ_UNARY_OPCODE_X0 = 1,
916 CNTLZ_UNARY_OPCODE_Y0 = 1,
917 CNTTZ_UNARY_OPCODE_X0 = 2,
918 CNTTZ_UNARY_OPCODE_Y0 = 2,
919 CRC32_32_RRR_0_OPCODE_X0 = 20,
920 CRC32_8_RRR_0_OPCODE_X0 = 21,
921 DBLALIGN2_RRR_0_OPCODE_X0 = 22,
922 DBLALIGN2_RRR_0_OPCODE_X1 = 13,
923 DBLALIGN4_RRR_0_OPCODE_X0 = 23,
924 DBLALIGN4_RRR_0_OPCODE_X1 = 14,
925 DBLALIGN6_RRR_0_OPCODE_X0 = 24,
926 DBLALIGN6_RRR_0_OPCODE_X1 = 15,
927 DBLALIGN_RRR_0_OPCODE_X0 = 25,
928 DRAIN_UNARY_OPCODE_X1 = 1,
929 DTLBPR_UNARY_OPCODE_X1 = 2,
930 EXCH4_RRR_0_OPCODE_X1 = 16,
931 EXCH_RRR_0_OPCODE_X1 = 17,
932 FDOUBLE_ADDSUB_RRR_0_OPCODE_X0 = 26,
933 FDOUBLE_ADD_FLAGS_RRR_0_OPCODE_X0 = 27,
934 FDOUBLE_MUL_FLAGS_RRR_0_OPCODE_X0 = 28,
935 FDOUBLE_PACK1_RRR_0_OPCODE_X0 = 29,
936 FDOUBLE_PACK2_RRR_0_OPCODE_X0 = 30,
937 FDOUBLE_SUB_FLAGS_RRR_0_OPCODE_X0 = 31,
938 FDOUBLE_UNPACK_MAX_RRR_0_OPCODE_X0 = 32,
939 FDOUBLE_UNPACK_MIN_RRR_0_OPCODE_X0 = 33,
940 FETCHADD4_RRR_0_OPCODE_X1 = 18,
941 FETCHADDGEZ4_RRR_0_OPCODE_X1 = 19,
942 FETCHADDGEZ_RRR_0_OPCODE_X1 = 20,
943 FETCHADD_RRR_0_OPCODE_X1 = 21,
944 FETCHAND4_RRR_0_OPCODE_X1 = 22,
945 FETCHAND_RRR_0_OPCODE_X1 = 23,
946 FETCHOR4_RRR_0_OPCODE_X1 = 24,
947 FETCHOR_RRR_0_OPCODE_X1 = 25,
948 FINV_UNARY_OPCODE_X1 = 3,
949 FLUSHWB_UNARY_OPCODE_X1 = 4,
950 FLUSH_UNARY_OPCODE_X1 = 5,
951 FNOP_UNARY_OPCODE_X0 = 3,
952 FNOP_UNARY_OPCODE_X1 = 6,
953 FNOP_UNARY_OPCODE_Y0 = 3,
954 FNOP_UNARY_OPCODE_Y1 = 8,
955 FSINGLE_ADD1_RRR_0_OPCODE_X0 = 34,
956 FSINGLE_ADDSUB2_RRR_0_OPCODE_X0 = 35,
957 FSINGLE_MUL1_RRR_0_OPCODE_X0 = 36,
958 FSINGLE_MUL2_RRR_0_OPCODE_X0 = 37,
959 FSINGLE_PACK1_UNARY_OPCODE_X0 = 4,
960 FSINGLE_PACK1_UNARY_OPCODE_Y0 = 4,
961 FSINGLE_PACK2_RRR_0_OPCODE_X0 = 38,
962 FSINGLE_SUB1_RRR_0_OPCODE_X0 = 39,
963 ICOH_UNARY_OPCODE_X1 = 7,
964 ILL_UNARY_OPCODE_X1 = 8,
965 ILL_UNARY_OPCODE_Y1 = 9,
966 IMM8_OPCODE_X0 = 4,
967 IMM8_OPCODE_X1 = 3,
968 INV_UNARY_OPCODE_X1 = 9,
969 IRET_UNARY_OPCODE_X1 = 10,
970 JALRP_UNARY_OPCODE_X1 = 11,
971 JALRP_UNARY_OPCODE_Y1 = 10,
972 JALR_UNARY_OPCODE_X1 = 12,
973 JALR_UNARY_OPCODE_Y1 = 11,
974 JAL_JUMP_OPCODE_X1 = 0,
975 JRP_UNARY_OPCODE_X1 = 13,
976 JRP_UNARY_OPCODE_Y1 = 12,
977 JR_UNARY_OPCODE_X1 = 14,
978 JR_UNARY_OPCODE_Y1 = 13,
979 JUMP_OPCODE_X1 = 4,
980 J_JUMP_OPCODE_X1 = 1,
981 LD1S_ADD_IMM8_OPCODE_X1 = 7,
982 LD1S_OPCODE_Y2 = 0,
983 LD1S_UNARY_OPCODE_X1 = 15,
984 LD1U_ADD_IMM8_OPCODE_X1 = 8,
985 LD1U_OPCODE_Y2 = 1,
986 LD1U_UNARY_OPCODE_X1 = 16,
987 LD2S_ADD_IMM8_OPCODE_X1 = 9,
988 LD2S_OPCODE_Y2 = 2,
989 LD2S_UNARY_OPCODE_X1 = 17,
990 LD2U_ADD_IMM8_OPCODE_X1 = 10,
991 LD2U_OPCODE_Y2 = 3,
992 LD2U_UNARY_OPCODE_X1 = 18,
993 LD4S_ADD_IMM8_OPCODE_X1 = 11,
994 LD4S_OPCODE_Y2 = 1,
995 LD4S_UNARY_OPCODE_X1 = 19,
996 LD4U_ADD_IMM8_OPCODE_X1 = 12,
997 LD4U_OPCODE_Y2 = 2,
998 LD4U_UNARY_OPCODE_X1 = 20,
999 LDNA_ADD_IMM8_OPCODE_X1 = 21,
1000 LDNA_UNARY_OPCODE_X1 = 21,
1001 LDNT1S_ADD_IMM8_OPCODE_X1 = 13,
1002 LDNT1S_UNARY_OPCODE_X1 = 22,
1003 LDNT1U_ADD_IMM8_OPCODE_X1 = 14,
1004 LDNT1U_UNARY_OPCODE_X1 = 23,
1005 LDNT2S_ADD_IMM8_OPCODE_X1 = 15,
1006 LDNT2S_UNARY_OPCODE_X1 = 24,
1007 LDNT2U_ADD_IMM8_OPCODE_X1 = 16,
1008 LDNT2U_UNARY_OPCODE_X1 = 25,
1009 LDNT4S_ADD_IMM8_OPCODE_X1 = 17,
1010 LDNT4S_UNARY_OPCODE_X1 = 26,
1011 LDNT4U_ADD_IMM8_OPCODE_X1 = 18,
1012 LDNT4U_UNARY_OPCODE_X1 = 27,
1013 LDNT_ADD_IMM8_OPCODE_X1 = 19,
1014 LDNT_UNARY_OPCODE_X1 = 28,
1015 LD_ADD_IMM8_OPCODE_X1 = 20,
1016 LD_OPCODE_Y2 = 3,
1017 LD_UNARY_OPCODE_X1 = 29,
1018 LNK_UNARY_OPCODE_X1 = 30,
1019 LNK_UNARY_OPCODE_Y1 = 14,
1020 MFSPR_IMM8_OPCODE_X1 = 22,
1021 MF_UNARY_OPCODE_X1 = 31,
1022 MM_BF_OPCODE_X0 = 7,
1023 MNZ_RRR_0_OPCODE_X0 = 40,
1024 MNZ_RRR_0_OPCODE_X1 = 26,
1025 MNZ_RRR_4_OPCODE_Y0 = 2,
1026 MNZ_RRR_4_OPCODE_Y1 = 2,
1027 MODE_OPCODE_YA2 = 1,
1028 MODE_OPCODE_YB2 = 2,
1029 MODE_OPCODE_YC2 = 3,
1030 MTSPR_IMM8_OPCODE_X1 = 23,
1031 MULAX_RRR_0_OPCODE_X0 = 41,
1032 MULAX_RRR_3_OPCODE_Y0 = 2,
1033 MULA_HS_HS_RRR_0_OPCODE_X0 = 42,
1034 MULA_HS_HS_RRR_9_OPCODE_Y0 = 0,
1035 MULA_HS_HU_RRR_0_OPCODE_X0 = 43,
1036 MULA_HS_LS_RRR_0_OPCODE_X0 = 44,
1037 MULA_HS_LU_RRR_0_OPCODE_X0 = 45,
1038 MULA_HU_HU_RRR_0_OPCODE_X0 = 46,
1039 MULA_HU_HU_RRR_9_OPCODE_Y0 = 1,
1040 MULA_HU_LS_RRR_0_OPCODE_X0 = 47,
1041 MULA_HU_LU_RRR_0_OPCODE_X0 = 48,
1042 MULA_LS_LS_RRR_0_OPCODE_X0 = 49,
1043 MULA_LS_LS_RRR_9_OPCODE_Y0 = 2,
1044 MULA_LS_LU_RRR_0_OPCODE_X0 = 50,
1045 MULA_LU_LU_RRR_0_OPCODE_X0 = 51,
1046 MULA_LU_LU_RRR_9_OPCODE_Y0 = 3,
1047 MULX_RRR_0_OPCODE_X0 = 52,
1048 MULX_RRR_3_OPCODE_Y0 = 3,
1049 MUL_HS_HS_RRR_0_OPCODE_X0 = 53,
1050 MUL_HS_HS_RRR_8_OPCODE_Y0 = 0,
1051 MUL_HS_HU_RRR_0_OPCODE_X0 = 54,
1052 MUL_HS_LS_RRR_0_OPCODE_X0 = 55,
1053 MUL_HS_LU_RRR_0_OPCODE_X0 = 56,
1054 MUL_HU_HU_RRR_0_OPCODE_X0 = 57,
1055 MUL_HU_HU_RRR_8_OPCODE_Y0 = 1,
1056 MUL_HU_LS_RRR_0_OPCODE_X0 = 58,
1057 MUL_HU_LU_RRR_0_OPCODE_X0 = 59,
1058 MUL_LS_LS_RRR_0_OPCODE_X0 = 60,
1059 MUL_LS_LS_RRR_8_OPCODE_Y0 = 2,
1060 MUL_LS_LU_RRR_0_OPCODE_X0 = 61,
1061 MUL_LU_LU_RRR_0_OPCODE_X0 = 62,
1062 MUL_LU_LU_RRR_8_OPCODE_Y0 = 3,
1063 MZ_RRR_0_OPCODE_X0 = 63,
1064 MZ_RRR_0_OPCODE_X1 = 27,
1065 MZ_RRR_4_OPCODE_Y0 = 3,
1066 MZ_RRR_4_OPCODE_Y1 = 3,
1067 NAP_UNARY_OPCODE_X1 = 32,
1068 NOP_UNARY_OPCODE_X0 = 5,
1069 NOP_UNARY_OPCODE_X1 = 33,
1070 NOP_UNARY_OPCODE_Y0 = 5,
1071 NOP_UNARY_OPCODE_Y1 = 15,
1072 NOR_RRR_0_OPCODE_X0 = 64,
1073 NOR_RRR_0_OPCODE_X1 = 28,
1074 NOR_RRR_5_OPCODE_Y0 = 1,
1075 NOR_RRR_5_OPCODE_Y1 = 1,
1076 ORI_IMM8_OPCODE_X0 = 7,
1077 ORI_IMM8_OPCODE_X1 = 24,
1078 OR_RRR_0_OPCODE_X0 = 65,
1079 OR_RRR_0_OPCODE_X1 = 29,
1080 OR_RRR_5_OPCODE_Y0 = 2,
1081 OR_RRR_5_OPCODE_Y1 = 2,
1082 PCNT_UNARY_OPCODE_X0 = 6,
1083 PCNT_UNARY_OPCODE_Y0 = 6,
1084 REVBITS_UNARY_OPCODE_X0 = 7,
1085 REVBITS_UNARY_OPCODE_Y0 = 7,
1086 REVBYTES_UNARY_OPCODE_X0 = 8,
1087 REVBYTES_UNARY_OPCODE_Y0 = 8,
1088 ROTLI_SHIFT_OPCODE_X0 = 1,
1089 ROTLI_SHIFT_OPCODE_X1 = 1,
1090 ROTLI_SHIFT_OPCODE_Y0 = 0,
1091 ROTLI_SHIFT_OPCODE_Y1 = 0,
1092 ROTL_RRR_0_OPCODE_X0 = 66,
1093 ROTL_RRR_0_OPCODE_X1 = 30,
1094 ROTL_RRR_6_OPCODE_Y0 = 0,
1095 ROTL_RRR_6_OPCODE_Y1 = 0,
1096 RRR_0_OPCODE_X0 = 5,
1097 RRR_0_OPCODE_X1 = 5,
1098 RRR_0_OPCODE_Y0 = 5,
1099 RRR_0_OPCODE_Y1 = 6,
1100 RRR_1_OPCODE_Y0 = 6,
1101 RRR_1_OPCODE_Y1 = 7,
1102 RRR_2_OPCODE_Y0 = 7,
1103 RRR_2_OPCODE_Y1 = 8,
1104 RRR_3_OPCODE_Y0 = 8,
1105 RRR_3_OPCODE_Y1 = 9,
1106 RRR_4_OPCODE_Y0 = 9,
1107 RRR_4_OPCODE_Y1 = 10,
1108 RRR_5_OPCODE_Y0 = 10,
1109 RRR_5_OPCODE_Y1 = 11,
1110 RRR_6_OPCODE_Y0 = 11,
1111 RRR_6_OPCODE_Y1 = 12,
1112 RRR_7_OPCODE_Y0 = 12,
1113 RRR_7_OPCODE_Y1 = 13,
1114 RRR_8_OPCODE_Y0 = 13,
1115 RRR_9_OPCODE_Y0 = 14,
1116 SHIFT_OPCODE_X0 = 6,
1117 SHIFT_OPCODE_X1 = 6,
1118 SHIFT_OPCODE_Y0 = 15,
1119 SHIFT_OPCODE_Y1 = 14,
1120 SHL16INSLI_OPCODE_X0 = 7,
1121 SHL16INSLI_OPCODE_X1 = 7,
1122 SHL1ADDX_RRR_0_OPCODE_X0 = 67,
1123 SHL1ADDX_RRR_0_OPCODE_X1 = 31,
1124 SHL1ADDX_RRR_7_OPCODE_Y0 = 1,
1125 SHL1ADDX_RRR_7_OPCODE_Y1 = 1,
1126 SHL1ADD_RRR_0_OPCODE_X0 = 68,
1127 SHL1ADD_RRR_0_OPCODE_X1 = 32,
1128 SHL1ADD_RRR_1_OPCODE_Y0 = 0,
1129 SHL1ADD_RRR_1_OPCODE_Y1 = 0,
1130 SHL2ADDX_RRR_0_OPCODE_X0 = 69,
1131 SHL2ADDX_RRR_0_OPCODE_X1 = 33,
1132 SHL2ADDX_RRR_7_OPCODE_Y0 = 2,
1133 SHL2ADDX_RRR_7_OPCODE_Y1 = 2,
1134 SHL2ADD_RRR_0_OPCODE_X0 = 70,
1135 SHL2ADD_RRR_0_OPCODE_X1 = 34,
1136 SHL2ADD_RRR_1_OPCODE_Y0 = 1,
1137 SHL2ADD_RRR_1_OPCODE_Y1 = 1,
1138 SHL3ADDX_RRR_0_OPCODE_X0 = 71,
1139 SHL3ADDX_RRR_0_OPCODE_X1 = 35,
1140 SHL3ADDX_RRR_7_OPCODE_Y0 = 3,
1141 SHL3ADDX_RRR_7_OPCODE_Y1 = 3,
1142 SHL3ADD_RRR_0_OPCODE_X0 = 72,
1143 SHL3ADD_RRR_0_OPCODE_X1 = 36,
1144 SHL3ADD_RRR_1_OPCODE_Y0 = 2,
1145 SHL3ADD_RRR_1_OPCODE_Y1 = 2,
1146 SHLI_SHIFT_OPCODE_X0 = 2,
1147 SHLI_SHIFT_OPCODE_X1 = 2,
1148 SHLI_SHIFT_OPCODE_Y0 = 1,
1149 SHLI_SHIFT_OPCODE_Y1 = 1,
1150 SHLXI_SHIFT_OPCODE_X0 = 3,
1151 SHLXI_SHIFT_OPCODE_X1 = 3,
1152 SHLX_RRR_0_OPCODE_X0 = 73,
1153 SHLX_RRR_0_OPCODE_X1 = 37,
1154 SHL_RRR_0_OPCODE_X0 = 74,
1155 SHL_RRR_0_OPCODE_X1 = 38,
1156 SHL_RRR_6_OPCODE_Y0 = 1,
1157 SHL_RRR_6_OPCODE_Y1 = 1,
1158 SHRSI_SHIFT_OPCODE_X0 = 4,
1159 SHRSI_SHIFT_OPCODE_X1 = 4,
1160 SHRSI_SHIFT_OPCODE_Y0 = 2,
1161 SHRSI_SHIFT_OPCODE_Y1 = 2,
1162 SHRS_RRR_0_OPCODE_X0 = 75,
1163 SHRS_RRR_0_OPCODE_X1 = 39,
1164 SHRS_RRR_6_OPCODE_Y0 = 2,
1165 SHRS_RRR_6_OPCODE_Y1 = 2,
1166 SHRUI_SHIFT_OPCODE_X0 = 5,
1167 SHRUI_SHIFT_OPCODE_X1 = 5,
1168 SHRUI_SHIFT_OPCODE_Y0 = 3,
1169 SHRUI_SHIFT_OPCODE_Y1 = 3,
1170 SHRUXI_SHIFT_OPCODE_X0 = 6,
1171 SHRUXI_SHIFT_OPCODE_X1 = 6,
1172 SHRUX_RRR_0_OPCODE_X0 = 76,
1173 SHRUX_RRR_0_OPCODE_X1 = 40,
1174 SHRU_RRR_0_OPCODE_X0 = 77,
1175 SHRU_RRR_0_OPCODE_X1 = 41,
1176 SHRU_RRR_6_OPCODE_Y0 = 3,
1177 SHRU_RRR_6_OPCODE_Y1 = 3,
1178 SHUFFLEBYTES_RRR_0_OPCODE_X0 = 78,
1179 ST1_ADD_IMM8_OPCODE_X1 = 25,
1180 ST1_OPCODE_Y2 = 0,
1181 ST1_RRR_0_OPCODE_X1 = 42,
1182 ST2_ADD_IMM8_OPCODE_X1 = 26,
1183 ST2_OPCODE_Y2 = 1,
1184 ST2_RRR_0_OPCODE_X1 = 43,
1185 ST4_ADD_IMM8_OPCODE_X1 = 27,
1186 ST4_OPCODE_Y2 = 2,
1187 ST4_RRR_0_OPCODE_X1 = 44,
1188 STNT1_ADD_IMM8_OPCODE_X1 = 28,
1189 STNT1_RRR_0_OPCODE_X1 = 45,
1190 STNT2_ADD_IMM8_OPCODE_X1 = 29,
1191 STNT2_RRR_0_OPCODE_X1 = 46,
1192 STNT4_ADD_IMM8_OPCODE_X1 = 30,
1193 STNT4_RRR_0_OPCODE_X1 = 47,
1194 STNT_ADD_IMM8_OPCODE_X1 = 31,
1195 STNT_RRR_0_OPCODE_X1 = 48,
1196 ST_ADD_IMM8_OPCODE_X1 = 32,
1197 ST_OPCODE_Y2 = 3,
1198 ST_RRR_0_OPCODE_X1 = 49,
1199 SUBXSC_RRR_0_OPCODE_X0 = 79,
1200 SUBXSC_RRR_0_OPCODE_X1 = 50,
1201 SUBX_RRR_0_OPCODE_X0 = 80,
1202 SUBX_RRR_0_OPCODE_X1 = 51,
1203 SUBX_RRR_0_OPCODE_Y0 = 2,
1204 SUBX_RRR_0_OPCODE_Y1 = 2,
1205 SUB_RRR_0_OPCODE_X0 = 81,
1206 SUB_RRR_0_OPCODE_X1 = 52,
1207 SUB_RRR_0_OPCODE_Y0 = 3,
1208 SUB_RRR_0_OPCODE_Y1 = 3,
1209 SWINT0_UNARY_OPCODE_X1 = 34,
1210 SWINT1_UNARY_OPCODE_X1 = 35,
1211 SWINT2_UNARY_OPCODE_X1 = 36,
1212 SWINT3_UNARY_OPCODE_X1 = 37,
1213 TBLIDXB0_UNARY_OPCODE_X0 = 9,
1214 TBLIDXB0_UNARY_OPCODE_Y0 = 9,
1215 TBLIDXB1_UNARY_OPCODE_X0 = 10,
1216 TBLIDXB1_UNARY_OPCODE_Y0 = 10,
1217 TBLIDXB2_UNARY_OPCODE_X0 = 11,
1218 TBLIDXB2_UNARY_OPCODE_Y0 = 11,
1219 TBLIDXB3_UNARY_OPCODE_X0 = 12,
1220 TBLIDXB3_UNARY_OPCODE_Y0 = 12,
1221 UNARY_RRR_0_OPCODE_X0 = 82,
1222 UNARY_RRR_0_OPCODE_X1 = 53,
1223 UNARY_RRR_1_OPCODE_Y0 = 3,
1224 UNARY_RRR_1_OPCODE_Y1 = 3,
1225 V1ADDI_IMM8_OPCODE_X0 = 8,
1226 V1ADDI_IMM8_OPCODE_X1 = 33,
1227 V1ADDUC_RRR_0_OPCODE_X0 = 83,
1228 V1ADDUC_RRR_0_OPCODE_X1 = 54,
1229 V1ADD_RRR_0_OPCODE_X0 = 84,
1230 V1ADD_RRR_0_OPCODE_X1 = 55,
1231 V1ADIFFU_RRR_0_OPCODE_X0 = 85,
1232 V1AVGU_RRR_0_OPCODE_X0 = 86,
1233 V1CMPEQI_IMM8_OPCODE_X0 = 9,
1234 V1CMPEQI_IMM8_OPCODE_X1 = 34,
1235 V1CMPEQ_RRR_0_OPCODE_X0 = 87,
1236 V1CMPEQ_RRR_0_OPCODE_X1 = 56,
1237 V1CMPLES_RRR_0_OPCODE_X0 = 88,
1238 V1CMPLES_RRR_0_OPCODE_X1 = 57,
1239 V1CMPLEU_RRR_0_OPCODE_X0 = 89,
1240 V1CMPLEU_RRR_0_OPCODE_X1 = 58,
1241 V1CMPLTSI_IMM8_OPCODE_X0 = 10,
1242 V1CMPLTSI_IMM8_OPCODE_X1 = 35,
1243 V1CMPLTS_RRR_0_OPCODE_X0 = 90,
1244 V1CMPLTS_RRR_0_OPCODE_X1 = 59,
1245 V1CMPLTUI_IMM8_OPCODE_X0 = 11,
1246 V1CMPLTUI_IMM8_OPCODE_X1 = 36,
1247 V1CMPLTU_RRR_0_OPCODE_X0 = 91,
1248 V1CMPLTU_RRR_0_OPCODE_X1 = 60,
1249 V1CMPNE_RRR_0_OPCODE_X0 = 92,
1250 V1CMPNE_RRR_0_OPCODE_X1 = 61,
1251 V1DDOTPUA_RRR_0_OPCODE_X0 = 161,
1252 V1DDOTPUSA_RRR_0_OPCODE_X0 = 93,
1253 V1DDOTPUS_RRR_0_OPCODE_X0 = 94,
1254 V1DDOTPU_RRR_0_OPCODE_X0 = 162,
1255 V1DOTPA_RRR_0_OPCODE_X0 = 95,
1256 V1DOTPUA_RRR_0_OPCODE_X0 = 163,
1257 V1DOTPUSA_RRR_0_OPCODE_X0 = 96,
1258 V1DOTPUS_RRR_0_OPCODE_X0 = 97,
1259 V1DOTPU_RRR_0_OPCODE_X0 = 164,
1260 V1DOTP_RRR_0_OPCODE_X0 = 98,
1261 V1INT_H_RRR_0_OPCODE_X0 = 99,
1262 V1INT_H_RRR_0_OPCODE_X1 = 62,
1263 V1INT_L_RRR_0_OPCODE_X0 = 100,
1264 V1INT_L_RRR_0_OPCODE_X1 = 63,
1265 V1MAXUI_IMM8_OPCODE_X0 = 12,
1266 V1MAXUI_IMM8_OPCODE_X1 = 37,
1267 V1MAXU_RRR_0_OPCODE_X0 = 101,
1268 V1MAXU_RRR_0_OPCODE_X1 = 64,
1269 V1MINUI_IMM8_OPCODE_X0 = 13,
1270 V1MINUI_IMM8_OPCODE_X1 = 38,
1271 V1MINU_RRR_0_OPCODE_X0 = 102,
1272 V1MINU_RRR_0_OPCODE_X1 = 65,
1273 V1MNZ_RRR_0_OPCODE_X0 = 103,
1274 V1MNZ_RRR_0_OPCODE_X1 = 66,
1275 V1MULTU_RRR_0_OPCODE_X0 = 104,
1276 V1MULUS_RRR_0_OPCODE_X0 = 105,
1277 V1MULU_RRR_0_OPCODE_X0 = 106,
1278 V1MZ_RRR_0_OPCODE_X0 = 107,
1279 V1MZ_RRR_0_OPCODE_X1 = 67,
1280 V1SADAU_RRR_0_OPCODE_X0 = 108,
1281 V1SADU_RRR_0_OPCODE_X0 = 109,
1282 V1SHLI_SHIFT_OPCODE_X0 = 7,
1283 V1SHLI_SHIFT_OPCODE_X1 = 7,
1284 V1SHL_RRR_0_OPCODE_X0 = 110,
1285 V1SHL_RRR_0_OPCODE_X1 = 68,
1286 V1SHRSI_SHIFT_OPCODE_X0 = 8,
1287 V1SHRSI_SHIFT_OPCODE_X1 = 8,
1288 V1SHRS_RRR_0_OPCODE_X0 = 111,
1289 V1SHRS_RRR_0_OPCODE_X1 = 69,
1290 V1SHRUI_SHIFT_OPCODE_X0 = 9,
1291 V1SHRUI_SHIFT_OPCODE_X1 = 9,
1292 V1SHRU_RRR_0_OPCODE_X0 = 112,
1293 V1SHRU_RRR_0_OPCODE_X1 = 70,
1294 V1SUBUC_RRR_0_OPCODE_X0 = 113,
1295 V1SUBUC_RRR_0_OPCODE_X1 = 71,
1296 V1SUB_RRR_0_OPCODE_X0 = 114,
1297 V1SUB_RRR_0_OPCODE_X1 = 72,
1298 V2ADDI_IMM8_OPCODE_X0 = 14,
1299 V2ADDI_IMM8_OPCODE_X1 = 39,
1300 V2ADDSC_RRR_0_OPCODE_X0 = 115,
1301 V2ADDSC_RRR_0_OPCODE_X1 = 73,
1302 V2ADD_RRR_0_OPCODE_X0 = 116,
1303 V2ADD_RRR_0_OPCODE_X1 = 74,
1304 V2ADIFFS_RRR_0_OPCODE_X0 = 117,
1305 V2AVGS_RRR_0_OPCODE_X0 = 118,
1306 V2CMPEQI_IMM8_OPCODE_X0 = 15,
1307 V2CMPEQI_IMM8_OPCODE_X1 = 40,
1308 V2CMPEQ_RRR_0_OPCODE_X0 = 119,
1309 V2CMPEQ_RRR_0_OPCODE_X1 = 75,
1310 V2CMPLES_RRR_0_OPCODE_X0 = 120,
1311 V2CMPLES_RRR_0_OPCODE_X1 = 76,
1312 V2CMPLEU_RRR_0_OPCODE_X0 = 121,
1313 V2CMPLEU_RRR_0_OPCODE_X1 = 77,
1314 V2CMPLTSI_IMM8_OPCODE_X0 = 16,
1315 V2CMPLTSI_IMM8_OPCODE_X1 = 41,
1316 V2CMPLTS_RRR_0_OPCODE_X0 = 122,
1317 V2CMPLTS_RRR_0_OPCODE_X1 = 78,
1318 V2CMPLTUI_IMM8_OPCODE_X0 = 17,
1319 V2CMPLTUI_IMM8_OPCODE_X1 = 42,
1320 V2CMPLTU_RRR_0_OPCODE_X0 = 123,
1321 V2CMPLTU_RRR_0_OPCODE_X1 = 79,
1322 V2CMPNE_RRR_0_OPCODE_X0 = 124,
1323 V2CMPNE_RRR_0_OPCODE_X1 = 80,
1324 V2DOTPA_RRR_0_OPCODE_X0 = 125,
1325 V2DOTP_RRR_0_OPCODE_X0 = 126,
1326 V2INT_H_RRR_0_OPCODE_X0 = 127,
1327 V2INT_H_RRR_0_OPCODE_X1 = 81,
1328 V2INT_L_RRR_0_OPCODE_X0 = 128,
1329 V2INT_L_RRR_0_OPCODE_X1 = 82,
1330 V2MAXSI_IMM8_OPCODE_X0 = 18,
1331 V2MAXSI_IMM8_OPCODE_X1 = 43,
1332 V2MAXS_RRR_0_OPCODE_X0 = 129,
1333 V2MAXS_RRR_0_OPCODE_X1 = 83,
1334 V2MINSI_IMM8_OPCODE_X0 = 19,
1335 V2MINSI_IMM8_OPCODE_X1 = 44,
1336 V2MINS_RRR_0_OPCODE_X0 = 130,
1337 V2MINS_RRR_0_OPCODE_X1 = 84,
1338 V2MNZ_RRR_0_OPCODE_X0 = 131,
1339 V2MNZ_RRR_0_OPCODE_X1 = 85,
1340 V2MULFSC_RRR_0_OPCODE_X0 = 132,
1341 V2MULS_RRR_0_OPCODE_X0 = 133,
1342 V2MULTS_RRR_0_OPCODE_X0 = 134,
1343 V2MZ_RRR_0_OPCODE_X0 = 135,
1344 V2MZ_RRR_0_OPCODE_X1 = 86,
1345 V2PACKH_RRR_0_OPCODE_X0 = 136,
1346 V2PACKH_RRR_0_OPCODE_X1 = 87,
1347 V2PACKL_RRR_0_OPCODE_X0 = 137,
1348 V2PACKL_RRR_0_OPCODE_X1 = 88,
1349 V2PACKUC_RRR_0_OPCODE_X0 = 138,
1350 V2PACKUC_RRR_0_OPCODE_X1 = 89,
1351 V2SADAS_RRR_0_OPCODE_X0 = 139,
1352 V2SADAU_RRR_0_OPCODE_X0 = 140,
1353 V2SADS_RRR_0_OPCODE_X0 = 141,
1354 V2SADU_RRR_0_OPCODE_X0 = 142,
1355 V2SHLI_SHIFT_OPCODE_X0 = 10,
1356 V2SHLI_SHIFT_OPCODE_X1 = 10,
1357 V2SHLSC_RRR_0_OPCODE_X0 = 143,
1358 V2SHLSC_RRR_0_OPCODE_X1 = 90,
1359 V2SHL_RRR_0_OPCODE_X0 = 144,
1360 V2SHL_RRR_0_OPCODE_X1 = 91,
1361 V2SHRSI_SHIFT_OPCODE_X0 = 11,
1362 V2SHRSI_SHIFT_OPCODE_X1 = 11,
1363 V2SHRS_RRR_0_OPCODE_X0 = 145,
1364 V2SHRS_RRR_0_OPCODE_X1 = 92,
1365 V2SHRUI_SHIFT_OPCODE_X0 = 12,
1366 V2SHRUI_SHIFT_OPCODE_X1 = 12,
1367 V2SHRU_RRR_0_OPCODE_X0 = 146,
1368 V2SHRU_RRR_0_OPCODE_X1 = 93,
1369 V2SUBSC_RRR_0_OPCODE_X0 = 147,
1370 V2SUBSC_RRR_0_OPCODE_X1 = 94,
1371 V2SUB_RRR_0_OPCODE_X0 = 148,
1372 V2SUB_RRR_0_OPCODE_X1 = 95,
1373 V4ADDSC_RRR_0_OPCODE_X0 = 149,
1374 V4ADDSC_RRR_0_OPCODE_X1 = 96,
1375 V4ADD_RRR_0_OPCODE_X0 = 150,
1376 V4ADD_RRR_0_OPCODE_X1 = 97,
1377 V4INT_H_RRR_0_OPCODE_X0 = 151,
1378 V4INT_H_RRR_0_OPCODE_X1 = 98,
1379 V4INT_L_RRR_0_OPCODE_X0 = 152,
1380 V4INT_L_RRR_0_OPCODE_X1 = 99,
1381 V4PACKSC_RRR_0_OPCODE_X0 = 153,
1382 V4PACKSC_RRR_0_OPCODE_X1 = 100,
1383 V4SHLSC_RRR_0_OPCODE_X0 = 154,
1384 V4SHLSC_RRR_0_OPCODE_X1 = 101,
1385 V4SHL_RRR_0_OPCODE_X0 = 155,
1386 V4SHL_RRR_0_OPCODE_X1 = 102,
1387 V4SHRS_RRR_0_OPCODE_X0 = 156,
1388 V4SHRS_RRR_0_OPCODE_X1 = 103,
1389 V4SHRU_RRR_0_OPCODE_X0 = 157,
1390 V4SHRU_RRR_0_OPCODE_X1 = 104,
1391 V4SUBSC_RRR_0_OPCODE_X0 = 158,
1392 V4SUBSC_RRR_0_OPCODE_X1 = 105,
1393 V4SUB_RRR_0_OPCODE_X0 = 159,
1394 V4SUB_RRR_0_OPCODE_X1 = 106,
1395 WH64_UNARY_OPCODE_X1 = 38,
1396 XORI_IMM8_OPCODE_X0 = 20,
1397 XORI_IMM8_OPCODE_X1 = 45,
1398 XOR_RRR_0_OPCODE_X0 = 160,
1399 XOR_RRR_0_OPCODE_X1 = 107,
1400 XOR_RRR_5_OPCODE_Y0 = 3,
1401 XOR_RRR_5_OPCODE_Y1 = 3
1402 };
1403
1404
1405 #endif /* __ASSEMBLER__ */
1406
1407 #endif /* __ARCH_OPCODE_H__ */
1408