• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* TILE-Gx opcode information.
2  *
3  * Copyright 2011 Tilera Corporation. All Rights Reserved.
4  *
5  *   This program is free software; you can redistribute it and/or
6  *   modify it under the terms of the GNU General Public License
7  *   as published by the Free Software Foundation, version 2.
8  *
9  *   This program is distributed in the hope that it will be useful, but
10  *   WITHOUT ANY WARRANTY; without even the implied warranty of
11  *   MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
12  *   NON INFRINGEMENT.  See the GNU General Public License for
13  *   more details.
14  *
15  *
16  *
17  *
18  *
19  */
20 
21 #ifndef __ARCH_OPCODE_H__
22 #define __ARCH_OPCODE_H__
23 
24 #ifndef __ASSEMBLER__
25 
/* One TILE-Gx instruction bundle: a 64-bit word holding the encoded
   instructions for a single issue cycle. */
typedef unsigned long long tilegx_bundle_bits;

/* These are the bits that determine if a bundle is in the X encoding. */
#define TILEGX_BUNDLE_MODE_MASK ((tilegx_bundle_bits)3 << 62)

enum
{
  /* Maximum number of instructions in a bundle (2 for X, 3 for Y). */
  TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE = 3,

  /* How many different pipeline encodings are there? X0, X1, Y0, Y1, Y2. */
  TILEGX_NUM_PIPELINE_ENCODINGS = 5,

  /* Log base 2 of TILEGX_BUNDLE_SIZE_IN_BYTES. */
  TILEGX_LOG2_BUNDLE_SIZE_IN_BYTES = 3,

  /* Instructions take this many bytes. */
  TILEGX_BUNDLE_SIZE_IN_BYTES = 1 << TILEGX_LOG2_BUNDLE_SIZE_IN_BYTES,

  /* Log base 2 of TILEGX_BUNDLE_ALIGNMENT_IN_BYTES. */
  TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES = 3,

  /* Bundles should be aligned modulo this number of bytes. */
  TILEGX_BUNDLE_ALIGNMENT_IN_BYTES =
    (1 << TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES),

  /* Number of registers (some are magic, such as network I/O). */
  TILEGX_NUM_REGISTERS = 64,
};

/* 64-bit pattern for a { bpt ; nop } bundle. */
#define TILEGX_BPT_BUNDLE 0x286a44ae51485000ULL

/* Make a few "tile_" names to simplify common code between
   architectures.  */
typedef tilegx_bundle_bits tile_bundle_bits;
#define TILE_BUNDLE_SIZE_IN_BYTES TILEGX_BUNDLE_SIZE_IN_BYTES
#define TILE_BUNDLE_ALIGNMENT_IN_BYTES TILEGX_BUNDLE_ALIGNMENT_IN_BYTES
#define TILE_LOG2_BUNDLE_ALIGNMENT_IN_BYTES \
  TILEGX_LOG2_BUNDLE_ALIGNMENT_IN_BYTES
#define TILE_BPT_BUNDLE TILEGX_BPT_BUNDLE
68 
69 static __inline unsigned int
get_BFEnd_X0(tilegx_bundle_bits num)70 get_BFEnd_X0(tilegx_bundle_bits num)
71 {
72   const unsigned int n = (unsigned int)num;
73   return (((n >> 12)) & 0x3f);
74 }
75 
76 static __inline unsigned int
get_BFOpcodeExtension_X0(tilegx_bundle_bits num)77 get_BFOpcodeExtension_X0(tilegx_bundle_bits num)
78 {
79   const unsigned int n = (unsigned int)num;
80   return (((n >> 24)) & 0xf);
81 }
82 
83 static __inline unsigned int
get_BFStart_X0(tilegx_bundle_bits num)84 get_BFStart_X0(tilegx_bundle_bits num)
85 {
86   const unsigned int n = (unsigned int)num;
87   return (((n >> 18)) & 0x3f);
88 }
89 
90 static __inline unsigned int
get_BrOff_X1(tilegx_bundle_bits n)91 get_BrOff_X1(tilegx_bundle_bits n)
92 {
93   return (((unsigned int)(n >> 31)) & 0x0000003f) |
94          (((unsigned int)(n >> 37)) & 0x0001ffc0);
95 }
96 
97 static __inline unsigned int
get_BrType_X1(tilegx_bundle_bits n)98 get_BrType_X1(tilegx_bundle_bits n)
99 {
100   return (((unsigned int)(n >> 54)) & 0x1f);
101 }
102 
103 static __inline unsigned int
get_Dest_Imm8_X1(tilegx_bundle_bits n)104 get_Dest_Imm8_X1(tilegx_bundle_bits n)
105 {
106   return (((unsigned int)(n >> 31)) & 0x0000003f) |
107          (((unsigned int)(n >> 43)) & 0x000000c0);
108 }
109 
110 static __inline unsigned int
get_Dest_X0(tilegx_bundle_bits num)111 get_Dest_X0(tilegx_bundle_bits num)
112 {
113   const unsigned int n = (unsigned int)num;
114   return (((n >> 0)) & 0x3f);
115 }
116 
117 static __inline unsigned int
get_Dest_X1(tilegx_bundle_bits n)118 get_Dest_X1(tilegx_bundle_bits n)
119 {
120   return (((unsigned int)(n >> 31)) & 0x3f);
121 }
122 
123 static __inline unsigned int
get_Dest_Y0(tilegx_bundle_bits num)124 get_Dest_Y0(tilegx_bundle_bits num)
125 {
126   const unsigned int n = (unsigned int)num;
127   return (((n >> 0)) & 0x3f);
128 }
129 
130 static __inline unsigned int
get_Dest_Y1(tilegx_bundle_bits n)131 get_Dest_Y1(tilegx_bundle_bits n)
132 {
133   return (((unsigned int)(n >> 31)) & 0x3f);
134 }
135 
136 static __inline unsigned int
get_Imm16_X0(tilegx_bundle_bits num)137 get_Imm16_X0(tilegx_bundle_bits num)
138 {
139   const unsigned int n = (unsigned int)num;
140   return (((n >> 12)) & 0xffff);
141 }
142 
143 static __inline unsigned int
get_Imm16_X1(tilegx_bundle_bits n)144 get_Imm16_X1(tilegx_bundle_bits n)
145 {
146   return (((unsigned int)(n >> 43)) & 0xffff);
147 }
148 
149 static __inline unsigned int
get_Imm8OpcodeExtension_X0(tilegx_bundle_bits num)150 get_Imm8OpcodeExtension_X0(tilegx_bundle_bits num)
151 {
152   const unsigned int n = (unsigned int)num;
153   return (((n >> 20)) & 0xff);
154 }
155 
156 static __inline unsigned int
get_Imm8OpcodeExtension_X1(tilegx_bundle_bits n)157 get_Imm8OpcodeExtension_X1(tilegx_bundle_bits n)
158 {
159   return (((unsigned int)(n >> 51)) & 0xff);
160 }
161 
162 static __inline unsigned int
get_Imm8_X0(tilegx_bundle_bits num)163 get_Imm8_X0(tilegx_bundle_bits num)
164 {
165   const unsigned int n = (unsigned int)num;
166   return (((n >> 12)) & 0xff);
167 }
168 
169 static __inline unsigned int
get_Imm8_X1(tilegx_bundle_bits n)170 get_Imm8_X1(tilegx_bundle_bits n)
171 {
172   return (((unsigned int)(n >> 43)) & 0xff);
173 }
174 
175 static __inline unsigned int
get_Imm8_Y0(tilegx_bundle_bits num)176 get_Imm8_Y0(tilegx_bundle_bits num)
177 {
178   const unsigned int n = (unsigned int)num;
179   return (((n >> 12)) & 0xff);
180 }
181 
182 static __inline unsigned int
get_Imm8_Y1(tilegx_bundle_bits n)183 get_Imm8_Y1(tilegx_bundle_bits n)
184 {
185   return (((unsigned int)(n >> 43)) & 0xff);
186 }
187 
188 static __inline unsigned int
get_JumpOff_X1(tilegx_bundle_bits n)189 get_JumpOff_X1(tilegx_bundle_bits n)
190 {
191   return (((unsigned int)(n >> 31)) & 0x7ffffff);
192 }
193 
194 static __inline unsigned int
get_JumpOpcodeExtension_X1(tilegx_bundle_bits n)195 get_JumpOpcodeExtension_X1(tilegx_bundle_bits n)
196 {
197   return (((unsigned int)(n >> 58)) & 0x1);
198 }
199 
200 static __inline unsigned int
get_MF_Imm14_X1(tilegx_bundle_bits n)201 get_MF_Imm14_X1(tilegx_bundle_bits n)
202 {
203   return (((unsigned int)(n >> 37)) & 0x3fff);
204 }
205 
206 static __inline unsigned int
get_MT_Imm14_X1(tilegx_bundle_bits n)207 get_MT_Imm14_X1(tilegx_bundle_bits n)
208 {
209   return (((unsigned int)(n >> 31)) & 0x0000003f) |
210          (((unsigned int)(n >> 37)) & 0x00003fc0);
211 }
212 
213 static __inline unsigned int
get_Mode(tilegx_bundle_bits n)214 get_Mode(tilegx_bundle_bits n)
215 {
216   return (((unsigned int)(n >> 62)) & 0x3);
217 }
218 
219 static __inline unsigned int
get_Opcode_X0(tilegx_bundle_bits num)220 get_Opcode_X0(tilegx_bundle_bits num)
221 {
222   const unsigned int n = (unsigned int)num;
223   return (((n >> 28)) & 0x7);
224 }
225 
226 static __inline unsigned int
get_Opcode_X1(tilegx_bundle_bits n)227 get_Opcode_X1(tilegx_bundle_bits n)
228 {
229   return (((unsigned int)(n >> 59)) & 0x7);
230 }
231 
232 static __inline unsigned int
get_Opcode_Y0(tilegx_bundle_bits num)233 get_Opcode_Y0(tilegx_bundle_bits num)
234 {
235   const unsigned int n = (unsigned int)num;
236   return (((n >> 27)) & 0xf);
237 }
238 
239 static __inline unsigned int
get_Opcode_Y1(tilegx_bundle_bits n)240 get_Opcode_Y1(tilegx_bundle_bits n)
241 {
242   return (((unsigned int)(n >> 58)) & 0xf);
243 }
244 
245 static __inline unsigned int
get_Opcode_Y2(tilegx_bundle_bits n)246 get_Opcode_Y2(tilegx_bundle_bits n)
247 {
248   return (((n >> 26)) & 0x00000001) |
249          (((unsigned int)(n >> 56)) & 0x00000002);
250 }
251 
252 static __inline unsigned int
get_RRROpcodeExtension_X0(tilegx_bundle_bits num)253 get_RRROpcodeExtension_X0(tilegx_bundle_bits num)
254 {
255   const unsigned int n = (unsigned int)num;
256   return (((n >> 18)) & 0x3ff);
257 }
258 
259 static __inline unsigned int
get_RRROpcodeExtension_X1(tilegx_bundle_bits n)260 get_RRROpcodeExtension_X1(tilegx_bundle_bits n)
261 {
262   return (((unsigned int)(n >> 49)) & 0x3ff);
263 }
264 
265 static __inline unsigned int
get_RRROpcodeExtension_Y0(tilegx_bundle_bits num)266 get_RRROpcodeExtension_Y0(tilegx_bundle_bits num)
267 {
268   const unsigned int n = (unsigned int)num;
269   return (((n >> 18)) & 0x3);
270 }
271 
272 static __inline unsigned int
get_RRROpcodeExtension_Y1(tilegx_bundle_bits n)273 get_RRROpcodeExtension_Y1(tilegx_bundle_bits n)
274 {
275   return (((unsigned int)(n >> 49)) & 0x3);
276 }
277 
278 static __inline unsigned int
get_ShAmt_X0(tilegx_bundle_bits num)279 get_ShAmt_X0(tilegx_bundle_bits num)
280 {
281   const unsigned int n = (unsigned int)num;
282   return (((n >> 12)) & 0x3f);
283 }
284 
285 static __inline unsigned int
get_ShAmt_X1(tilegx_bundle_bits n)286 get_ShAmt_X1(tilegx_bundle_bits n)
287 {
288   return (((unsigned int)(n >> 43)) & 0x3f);
289 }
290 
291 static __inline unsigned int
get_ShAmt_Y0(tilegx_bundle_bits num)292 get_ShAmt_Y0(tilegx_bundle_bits num)
293 {
294   const unsigned int n = (unsigned int)num;
295   return (((n >> 12)) & 0x3f);
296 }
297 
298 static __inline unsigned int
get_ShAmt_Y1(tilegx_bundle_bits n)299 get_ShAmt_Y1(tilegx_bundle_bits n)
300 {
301   return (((unsigned int)(n >> 43)) & 0x3f);
302 }
303 
304 static __inline unsigned int
get_ShiftOpcodeExtension_X0(tilegx_bundle_bits num)305 get_ShiftOpcodeExtension_X0(tilegx_bundle_bits num)
306 {
307   const unsigned int n = (unsigned int)num;
308   return (((n >> 18)) & 0x3ff);
309 }
310 
311 static __inline unsigned int
get_ShiftOpcodeExtension_X1(tilegx_bundle_bits n)312 get_ShiftOpcodeExtension_X1(tilegx_bundle_bits n)
313 {
314   return (((unsigned int)(n >> 49)) & 0x3ff);
315 }
316 
317 static __inline unsigned int
get_ShiftOpcodeExtension_Y0(tilegx_bundle_bits num)318 get_ShiftOpcodeExtension_Y0(tilegx_bundle_bits num)
319 {
320   const unsigned int n = (unsigned int)num;
321   return (((n >> 18)) & 0x3);
322 }
323 
324 static __inline unsigned int
get_ShiftOpcodeExtension_Y1(tilegx_bundle_bits n)325 get_ShiftOpcodeExtension_Y1(tilegx_bundle_bits n)
326 {
327   return (((unsigned int)(n >> 49)) & 0x3);
328 }
329 
330 static __inline unsigned int
get_SrcA_X0(tilegx_bundle_bits num)331 get_SrcA_X0(tilegx_bundle_bits num)
332 {
333   const unsigned int n = (unsigned int)num;
334   return (((n >> 6)) & 0x3f);
335 }
336 
337 static __inline unsigned int
get_SrcA_X1(tilegx_bundle_bits n)338 get_SrcA_X1(tilegx_bundle_bits n)
339 {
340   return (((unsigned int)(n >> 37)) & 0x3f);
341 }
342 
343 static __inline unsigned int
get_SrcA_Y0(tilegx_bundle_bits num)344 get_SrcA_Y0(tilegx_bundle_bits num)
345 {
346   const unsigned int n = (unsigned int)num;
347   return (((n >> 6)) & 0x3f);
348 }
349 
350 static __inline unsigned int
get_SrcA_Y1(tilegx_bundle_bits n)351 get_SrcA_Y1(tilegx_bundle_bits n)
352 {
353   return (((unsigned int)(n >> 37)) & 0x3f);
354 }
355 
356 static __inline unsigned int
get_SrcA_Y2(tilegx_bundle_bits num)357 get_SrcA_Y2(tilegx_bundle_bits num)
358 {
359   const unsigned int n = (unsigned int)num;
360   return (((n >> 20)) & 0x3f);
361 }
362 
363 static __inline unsigned int
get_SrcBDest_Y2(tilegx_bundle_bits n)364 get_SrcBDest_Y2(tilegx_bundle_bits n)
365 {
366   return (((unsigned int)(n >> 51)) & 0x3f);
367 }
368 
369 static __inline unsigned int
get_SrcB_X0(tilegx_bundle_bits num)370 get_SrcB_X0(tilegx_bundle_bits num)
371 {
372   const unsigned int n = (unsigned int)num;
373   return (((n >> 12)) & 0x3f);
374 }
375 
376 static __inline unsigned int
get_SrcB_X1(tilegx_bundle_bits n)377 get_SrcB_X1(tilegx_bundle_bits n)
378 {
379   return (((unsigned int)(n >> 43)) & 0x3f);
380 }
381 
382 static __inline unsigned int
get_SrcB_Y0(tilegx_bundle_bits num)383 get_SrcB_Y0(tilegx_bundle_bits num)
384 {
385   const unsigned int n = (unsigned int)num;
386   return (((n >> 12)) & 0x3f);
387 }
388 
389 static __inline unsigned int
get_SrcB_Y1(tilegx_bundle_bits n)390 get_SrcB_Y1(tilegx_bundle_bits n)
391 {
392   return (((unsigned int)(n >> 43)) & 0x3f);
393 }
394 
395 static __inline unsigned int
get_UnaryOpcodeExtension_X0(tilegx_bundle_bits num)396 get_UnaryOpcodeExtension_X0(tilegx_bundle_bits num)
397 {
398   const unsigned int n = (unsigned int)num;
399   return (((n >> 12)) & 0x3f);
400 }
401 
402 static __inline unsigned int
get_UnaryOpcodeExtension_X1(tilegx_bundle_bits n)403 get_UnaryOpcodeExtension_X1(tilegx_bundle_bits n)
404 {
405   return (((unsigned int)(n >> 43)) & 0x3f);
406 }
407 
408 static __inline unsigned int
get_UnaryOpcodeExtension_Y0(tilegx_bundle_bits num)409 get_UnaryOpcodeExtension_Y0(tilegx_bundle_bits num)
410 {
411   const unsigned int n = (unsigned int)num;
412   return (((n >> 12)) & 0x3f);
413 }
414 
415 static __inline unsigned int
get_UnaryOpcodeExtension_Y1(tilegx_bundle_bits n)416 get_UnaryOpcodeExtension_Y1(tilegx_bundle_bits n)
417 {
418   return (((unsigned int)(n >> 43)) & 0x3f);
419 }
420 
421 
/*
 * Sign-extend the low num_bits bits of n to a full signed int.
 *
 * num_bits must be in 1..32.  The previous "(n << shift) >> shift"
 * form relied on undefined behavior (left-shifting a value into or past
 * the sign bit, C11 6.5.7p4) and on the implementation-defined right
 * shift of negative ints.  This version does the extension entirely in
 * unsigned arithmetic, which is fully defined; only the final
 * conversion back to int is (universally two's-complement)
 * implementation-defined.
 */
static __inline int
sign_extend(int n, int num_bits)
{
  const unsigned int sign = 1U << (num_bits - 1);
  /* Keep only the field's bits. */
  const unsigned int val = (unsigned int)n & (sign | (sign - 1));
  /* XOR biases the sign bit down; the subtraction propagates it up. */
  return (int)((val ^ sign) - sign);
}
428 
429 
430 
431 static __inline tilegx_bundle_bits
create_BFEnd_X0(int num)432 create_BFEnd_X0(int num)
433 {
434   const unsigned int n = (unsigned int)num;
435   return ((n & 0x3f) << 12);
436 }
437 
438 static __inline tilegx_bundle_bits
create_BFOpcodeExtension_X0(int num)439 create_BFOpcodeExtension_X0(int num)
440 {
441   const unsigned int n = (unsigned int)num;
442   return ((n & 0xf) << 24);
443 }
444 
445 static __inline tilegx_bundle_bits
create_BFStart_X0(int num)446 create_BFStart_X0(int num)
447 {
448   const unsigned int n = (unsigned int)num;
449   return ((n & 0x3f) << 18);
450 }
451 
452 static __inline tilegx_bundle_bits
create_BrOff_X1(int num)453 create_BrOff_X1(int num)
454 {
455   const unsigned int n = (unsigned int)num;
456   return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
457          (((tilegx_bundle_bits)(n & 0x0001ffc0)) << 37);
458 }
459 
460 static __inline tilegx_bundle_bits
create_BrType_X1(int num)461 create_BrType_X1(int num)
462 {
463   const unsigned int n = (unsigned int)num;
464   return (((tilegx_bundle_bits)(n & 0x1f)) << 54);
465 }
466 
467 static __inline tilegx_bundle_bits
create_Dest_Imm8_X1(int num)468 create_Dest_Imm8_X1(int num)
469 {
470   const unsigned int n = (unsigned int)num;
471   return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
472          (((tilegx_bundle_bits)(n & 0x000000c0)) << 43);
473 }
474 
475 static __inline tilegx_bundle_bits
create_Dest_X0(int num)476 create_Dest_X0(int num)
477 {
478   const unsigned int n = (unsigned int)num;
479   return ((n & 0x3f) << 0);
480 }
481 
482 static __inline tilegx_bundle_bits
create_Dest_X1(int num)483 create_Dest_X1(int num)
484 {
485   const unsigned int n = (unsigned int)num;
486   return (((tilegx_bundle_bits)(n & 0x3f)) << 31);
487 }
488 
489 static __inline tilegx_bundle_bits
create_Dest_Y0(int num)490 create_Dest_Y0(int num)
491 {
492   const unsigned int n = (unsigned int)num;
493   return ((n & 0x3f) << 0);
494 }
495 
496 static __inline tilegx_bundle_bits
create_Dest_Y1(int num)497 create_Dest_Y1(int num)
498 {
499   const unsigned int n = (unsigned int)num;
500   return (((tilegx_bundle_bits)(n & 0x3f)) << 31);
501 }
502 
503 static __inline tilegx_bundle_bits
create_Imm16_X0(int num)504 create_Imm16_X0(int num)
505 {
506   const unsigned int n = (unsigned int)num;
507   return ((n & 0xffff) << 12);
508 }
509 
510 static __inline tilegx_bundle_bits
create_Imm16_X1(int num)511 create_Imm16_X1(int num)
512 {
513   const unsigned int n = (unsigned int)num;
514   return (((tilegx_bundle_bits)(n & 0xffff)) << 43);
515 }
516 
517 static __inline tilegx_bundle_bits
create_Imm8OpcodeExtension_X0(int num)518 create_Imm8OpcodeExtension_X0(int num)
519 {
520   const unsigned int n = (unsigned int)num;
521   return ((n & 0xff) << 20);
522 }
523 
524 static __inline tilegx_bundle_bits
create_Imm8OpcodeExtension_X1(int num)525 create_Imm8OpcodeExtension_X1(int num)
526 {
527   const unsigned int n = (unsigned int)num;
528   return (((tilegx_bundle_bits)(n & 0xff)) << 51);
529 }
530 
531 static __inline tilegx_bundle_bits
create_Imm8_X0(int num)532 create_Imm8_X0(int num)
533 {
534   const unsigned int n = (unsigned int)num;
535   return ((n & 0xff) << 12);
536 }
537 
538 static __inline tilegx_bundle_bits
create_Imm8_X1(int num)539 create_Imm8_X1(int num)
540 {
541   const unsigned int n = (unsigned int)num;
542   return (((tilegx_bundle_bits)(n & 0xff)) << 43);
543 }
544 
545 static __inline tilegx_bundle_bits
create_Imm8_Y0(int num)546 create_Imm8_Y0(int num)
547 {
548   const unsigned int n = (unsigned int)num;
549   return ((n & 0xff) << 12);
550 }
551 
552 static __inline tilegx_bundle_bits
create_Imm8_Y1(int num)553 create_Imm8_Y1(int num)
554 {
555   const unsigned int n = (unsigned int)num;
556   return (((tilegx_bundle_bits)(n & 0xff)) << 43);
557 }
558 
559 static __inline tilegx_bundle_bits
create_JumpOff_X1(int num)560 create_JumpOff_X1(int num)
561 {
562   const unsigned int n = (unsigned int)num;
563   return (((tilegx_bundle_bits)(n & 0x7ffffff)) << 31);
564 }
565 
566 static __inline tilegx_bundle_bits
create_JumpOpcodeExtension_X1(int num)567 create_JumpOpcodeExtension_X1(int num)
568 {
569   const unsigned int n = (unsigned int)num;
570   return (((tilegx_bundle_bits)(n & 0x1)) << 58);
571 }
572 
573 static __inline tilegx_bundle_bits
create_MF_Imm14_X1(int num)574 create_MF_Imm14_X1(int num)
575 {
576   const unsigned int n = (unsigned int)num;
577   return (((tilegx_bundle_bits)(n & 0x3fff)) << 37);
578 }
579 
580 static __inline tilegx_bundle_bits
create_MT_Imm14_X1(int num)581 create_MT_Imm14_X1(int num)
582 {
583   const unsigned int n = (unsigned int)num;
584   return (((tilegx_bundle_bits)(n & 0x0000003f)) << 31) |
585          (((tilegx_bundle_bits)(n & 0x00003fc0)) << 37);
586 }
587 
588 static __inline tilegx_bundle_bits
create_Mode(int num)589 create_Mode(int num)
590 {
591   const unsigned int n = (unsigned int)num;
592   return (((tilegx_bundle_bits)(n & 0x3)) << 62);
593 }
594 
595 static __inline tilegx_bundle_bits
create_Opcode_X0(int num)596 create_Opcode_X0(int num)
597 {
598   const unsigned int n = (unsigned int)num;
599   return ((n & 0x7) << 28);
600 }
601 
602 static __inline tilegx_bundle_bits
create_Opcode_X1(int num)603 create_Opcode_X1(int num)
604 {
605   const unsigned int n = (unsigned int)num;
606   return (((tilegx_bundle_bits)(n & 0x7)) << 59);
607 }
608 
609 static __inline tilegx_bundle_bits
create_Opcode_Y0(int num)610 create_Opcode_Y0(int num)
611 {
612   const unsigned int n = (unsigned int)num;
613   return ((n & 0xf) << 27);
614 }
615 
616 static __inline tilegx_bundle_bits
create_Opcode_Y1(int num)617 create_Opcode_Y1(int num)
618 {
619   const unsigned int n = (unsigned int)num;
620   return (((tilegx_bundle_bits)(n & 0xf)) << 58);
621 }
622 
623 static __inline tilegx_bundle_bits
create_Opcode_Y2(int num)624 create_Opcode_Y2(int num)
625 {
626   const unsigned int n = (unsigned int)num;
627   return ((n & 0x00000001) << 26) |
628          (((tilegx_bundle_bits)(n & 0x00000002)) << 56);
629 }
630 
631 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_X0(int num)632 create_RRROpcodeExtension_X0(int num)
633 {
634   const unsigned int n = (unsigned int)num;
635   return ((n & 0x3ff) << 18);
636 }
637 
638 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_X1(int num)639 create_RRROpcodeExtension_X1(int num)
640 {
641   const unsigned int n = (unsigned int)num;
642   return (((tilegx_bundle_bits)(n & 0x3ff)) << 49);
643 }
644 
645 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_Y0(int num)646 create_RRROpcodeExtension_Y0(int num)
647 {
648   const unsigned int n = (unsigned int)num;
649   return ((n & 0x3) << 18);
650 }
651 
652 static __inline tilegx_bundle_bits
create_RRROpcodeExtension_Y1(int num)653 create_RRROpcodeExtension_Y1(int num)
654 {
655   const unsigned int n = (unsigned int)num;
656   return (((tilegx_bundle_bits)(n & 0x3)) << 49);
657 }
658 
659 static __inline tilegx_bundle_bits
create_ShAmt_X0(int num)660 create_ShAmt_X0(int num)
661 {
662   const unsigned int n = (unsigned int)num;
663   return ((n & 0x3f) << 12);
664 }
665 
666 static __inline tilegx_bundle_bits
create_ShAmt_X1(int num)667 create_ShAmt_X1(int num)
668 {
669   const unsigned int n = (unsigned int)num;
670   return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
671 }
672 
673 static __inline tilegx_bundle_bits
create_ShAmt_Y0(int num)674 create_ShAmt_Y0(int num)
675 {
676   const unsigned int n = (unsigned int)num;
677   return ((n & 0x3f) << 12);
678 }
679 
680 static __inline tilegx_bundle_bits
create_ShAmt_Y1(int num)681 create_ShAmt_Y1(int num)
682 {
683   const unsigned int n = (unsigned int)num;
684   return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
685 }
686 
687 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_X0(int num)688 create_ShiftOpcodeExtension_X0(int num)
689 {
690   const unsigned int n = (unsigned int)num;
691   return ((n & 0x3ff) << 18);
692 }
693 
694 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_X1(int num)695 create_ShiftOpcodeExtension_X1(int num)
696 {
697   const unsigned int n = (unsigned int)num;
698   return (((tilegx_bundle_bits)(n & 0x3ff)) << 49);
699 }
700 
701 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_Y0(int num)702 create_ShiftOpcodeExtension_Y0(int num)
703 {
704   const unsigned int n = (unsigned int)num;
705   return ((n & 0x3) << 18);
706 }
707 
708 static __inline tilegx_bundle_bits
create_ShiftOpcodeExtension_Y1(int num)709 create_ShiftOpcodeExtension_Y1(int num)
710 {
711   const unsigned int n = (unsigned int)num;
712   return (((tilegx_bundle_bits)(n & 0x3)) << 49);
713 }
714 
715 static __inline tilegx_bundle_bits
create_SrcA_X0(int num)716 create_SrcA_X0(int num)
717 {
718   const unsigned int n = (unsigned int)num;
719   return ((n & 0x3f) << 6);
720 }
721 
722 static __inline tilegx_bundle_bits
create_SrcA_X1(int num)723 create_SrcA_X1(int num)
724 {
725   const unsigned int n = (unsigned int)num;
726   return (((tilegx_bundle_bits)(n & 0x3f)) << 37);
727 }
728 
729 static __inline tilegx_bundle_bits
create_SrcA_Y0(int num)730 create_SrcA_Y0(int num)
731 {
732   const unsigned int n = (unsigned int)num;
733   return ((n & 0x3f) << 6);
734 }
735 
736 static __inline tilegx_bundle_bits
create_SrcA_Y1(int num)737 create_SrcA_Y1(int num)
738 {
739   const unsigned int n = (unsigned int)num;
740   return (((tilegx_bundle_bits)(n & 0x3f)) << 37);
741 }
742 
743 static __inline tilegx_bundle_bits
create_SrcA_Y2(int num)744 create_SrcA_Y2(int num)
745 {
746   const unsigned int n = (unsigned int)num;
747   return ((n & 0x3f) << 20);
748 }
749 
750 static __inline tilegx_bundle_bits
create_SrcBDest_Y2(int num)751 create_SrcBDest_Y2(int num)
752 {
753   const unsigned int n = (unsigned int)num;
754   return (((tilegx_bundle_bits)(n & 0x3f)) << 51);
755 }
756 
757 static __inline tilegx_bundle_bits
create_SrcB_X0(int num)758 create_SrcB_X0(int num)
759 {
760   const unsigned int n = (unsigned int)num;
761   return ((n & 0x3f) << 12);
762 }
763 
764 static __inline tilegx_bundle_bits
create_SrcB_X1(int num)765 create_SrcB_X1(int num)
766 {
767   const unsigned int n = (unsigned int)num;
768   return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
769 }
770 
771 static __inline tilegx_bundle_bits
create_SrcB_Y0(int num)772 create_SrcB_Y0(int num)
773 {
774   const unsigned int n = (unsigned int)num;
775   return ((n & 0x3f) << 12);
776 }
777 
778 static __inline tilegx_bundle_bits
create_SrcB_Y1(int num)779 create_SrcB_Y1(int num)
780 {
781   const unsigned int n = (unsigned int)num;
782   return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
783 }
784 
785 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_X0(int num)786 create_UnaryOpcodeExtension_X0(int num)
787 {
788   const unsigned int n = (unsigned int)num;
789   return ((n & 0x3f) << 12);
790 }
791 
792 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_X1(int num)793 create_UnaryOpcodeExtension_X1(int num)
794 {
795   const unsigned int n = (unsigned int)num;
796   return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
797 }
798 
799 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_Y0(int num)800 create_UnaryOpcodeExtension_Y0(int num)
801 {
802   const unsigned int n = (unsigned int)num;
803   return ((n & 0x3f) << 12);
804 }
805 
806 static __inline tilegx_bundle_bits
create_UnaryOpcodeExtension_Y1(int num)807 create_UnaryOpcodeExtension_Y1(int num)
808 {
809   const unsigned int n = (unsigned int)num;
810   return (((tilegx_bundle_bits)(n & 0x3f)) << 43);
811 }
812 
813 
814 enum
815 {
816   ADDI_IMM8_OPCODE_X0 = 1,
817   ADDI_IMM8_OPCODE_X1 = 1,
818   ADDI_OPCODE_Y0 = 0,
819   ADDI_OPCODE_Y1 = 1,
820   ADDLI_OPCODE_X0 = 1,
821   ADDLI_OPCODE_X1 = 0,
822   ADDXI_IMM8_OPCODE_X0 = 2,
823   ADDXI_IMM8_OPCODE_X1 = 2,
824   ADDXI_OPCODE_Y0 = 1,
825   ADDXI_OPCODE_Y1 = 2,
826   ADDXLI_OPCODE_X0 = 2,
827   ADDXLI_OPCODE_X1 = 1,
828   ADDXSC_RRR_0_OPCODE_X0 = 1,
829   ADDXSC_RRR_0_OPCODE_X1 = 1,
830   ADDX_RRR_0_OPCODE_X0 = 2,
831   ADDX_RRR_0_OPCODE_X1 = 2,
832   ADDX_RRR_0_OPCODE_Y0 = 0,
833   ADDX_RRR_0_OPCODE_Y1 = 0,
834   ADD_RRR_0_OPCODE_X0 = 3,
835   ADD_RRR_0_OPCODE_X1 = 3,
836   ADD_RRR_0_OPCODE_Y0 = 1,
837   ADD_RRR_0_OPCODE_Y1 = 1,
838   ANDI_IMM8_OPCODE_X0 = 3,
839   ANDI_IMM8_OPCODE_X1 = 3,
840   ANDI_OPCODE_Y0 = 2,
841   ANDI_OPCODE_Y1 = 3,
842   AND_RRR_0_OPCODE_X0 = 4,
843   AND_RRR_0_OPCODE_X1 = 4,
844   AND_RRR_5_OPCODE_Y0 = 0,
845   AND_RRR_5_OPCODE_Y1 = 0,
846   BEQZT_BRANCH_OPCODE_X1 = 16,
847   BEQZ_BRANCH_OPCODE_X1 = 17,
848   BFEXTS_BF_OPCODE_X0 = 4,
849   BFEXTU_BF_OPCODE_X0 = 5,
850   BFINS_BF_OPCODE_X0 = 6,
851   BF_OPCODE_X0 = 3,
852   BGEZT_BRANCH_OPCODE_X1 = 18,
853   BGEZ_BRANCH_OPCODE_X1 = 19,
854   BGTZT_BRANCH_OPCODE_X1 = 20,
855   BGTZ_BRANCH_OPCODE_X1 = 21,
856   BLBCT_BRANCH_OPCODE_X1 = 22,
857   BLBC_BRANCH_OPCODE_X1 = 23,
858   BLBST_BRANCH_OPCODE_X1 = 24,
859   BLBS_BRANCH_OPCODE_X1 = 25,
860   BLEZT_BRANCH_OPCODE_X1 = 26,
861   BLEZ_BRANCH_OPCODE_X1 = 27,
862   BLTZT_BRANCH_OPCODE_X1 = 28,
863   BLTZ_BRANCH_OPCODE_X1 = 29,
864   BNEZT_BRANCH_OPCODE_X1 = 30,
865   BNEZ_BRANCH_OPCODE_X1 = 31,
866   BRANCH_OPCODE_X1 = 2,
867   CMOVEQZ_RRR_0_OPCODE_X0 = 5,
868   CMOVEQZ_RRR_4_OPCODE_Y0 = 0,
869   CMOVNEZ_RRR_0_OPCODE_X0 = 6,
870   CMOVNEZ_RRR_4_OPCODE_Y0 = 1,
871   CMPEQI_IMM8_OPCODE_X0 = 4,
872   CMPEQI_IMM8_OPCODE_X1 = 4,
873   CMPEQI_OPCODE_Y0 = 3,
874   CMPEQI_OPCODE_Y1 = 4,
875   CMPEQ_RRR_0_OPCODE_X0 = 7,
876   CMPEQ_RRR_0_OPCODE_X1 = 5,
877   CMPEQ_RRR_3_OPCODE_Y0 = 0,
878   CMPEQ_RRR_3_OPCODE_Y1 = 2,
879   CMPEXCH4_RRR_0_OPCODE_X1 = 6,
880   CMPEXCH_RRR_0_OPCODE_X1 = 7,
881   CMPLES_RRR_0_OPCODE_X0 = 8,
882   CMPLES_RRR_0_OPCODE_X1 = 8,
883   CMPLES_RRR_2_OPCODE_Y0 = 0,
884   CMPLES_RRR_2_OPCODE_Y1 = 0,
885   CMPLEU_RRR_0_OPCODE_X0 = 9,
886   CMPLEU_RRR_0_OPCODE_X1 = 9,
887   CMPLEU_RRR_2_OPCODE_Y0 = 1,
888   CMPLEU_RRR_2_OPCODE_Y1 = 1,
889   CMPLTSI_IMM8_OPCODE_X0 = 5,
890   CMPLTSI_IMM8_OPCODE_X1 = 5,
891   CMPLTSI_OPCODE_Y0 = 4,
892   CMPLTSI_OPCODE_Y1 = 5,
893   CMPLTS_RRR_0_OPCODE_X0 = 10,
894   CMPLTS_RRR_0_OPCODE_X1 = 10,
895   CMPLTS_RRR_2_OPCODE_Y0 = 2,
896   CMPLTS_RRR_2_OPCODE_Y1 = 2,
897   CMPLTUI_IMM8_OPCODE_X0 = 6,
898   CMPLTUI_IMM8_OPCODE_X1 = 6,
899   CMPLTU_RRR_0_OPCODE_X0 = 11,
900   CMPLTU_RRR_0_OPCODE_X1 = 11,
901   CMPLTU_RRR_2_OPCODE_Y0 = 3,
902   CMPLTU_RRR_2_OPCODE_Y1 = 3,
903   CMPNE_RRR_0_OPCODE_X0 = 12,
904   CMPNE_RRR_0_OPCODE_X1 = 12,
905   CMPNE_RRR_3_OPCODE_Y0 = 1,
906   CMPNE_RRR_3_OPCODE_Y1 = 3,
907   CMULAF_RRR_0_OPCODE_X0 = 13,
908   CMULA_RRR_0_OPCODE_X0 = 14,
909   CMULFR_RRR_0_OPCODE_X0 = 15,
910   CMULF_RRR_0_OPCODE_X0 = 16,
911   CMULHR_RRR_0_OPCODE_X0 = 17,
912   CMULH_RRR_0_OPCODE_X0 = 18,
913   CMUL_RRR_0_OPCODE_X0 = 19,
914   CNTLZ_UNARY_OPCODE_X0 = 1,
915   CNTLZ_UNARY_OPCODE_Y0 = 1,
916   CNTTZ_UNARY_OPCODE_X0 = 2,
917   CNTTZ_UNARY_OPCODE_Y0 = 2,
918   CRC32_32_RRR_0_OPCODE_X0 = 20,
919   CRC32_8_RRR_0_OPCODE_X0 = 21,
920   DBLALIGN2_RRR_0_OPCODE_X0 = 22,
921   DBLALIGN2_RRR_0_OPCODE_X1 = 13,
922   DBLALIGN4_RRR_0_OPCODE_X0 = 23,
923   DBLALIGN4_RRR_0_OPCODE_X1 = 14,
924   DBLALIGN6_RRR_0_OPCODE_X0 = 24,
925   DBLALIGN6_RRR_0_OPCODE_X1 = 15,
926   DBLALIGN_RRR_0_OPCODE_X0 = 25,
927   DRAIN_UNARY_OPCODE_X1 = 1,
928   DTLBPR_UNARY_OPCODE_X1 = 2,
929   EXCH4_RRR_0_OPCODE_X1 = 16,
930   EXCH_RRR_0_OPCODE_X1 = 17,
931   FDOUBLE_ADDSUB_RRR_0_OPCODE_X0 = 26,
932   FDOUBLE_ADD_FLAGS_RRR_0_OPCODE_X0 = 27,
933   FDOUBLE_MUL_FLAGS_RRR_0_OPCODE_X0 = 28,
934   FDOUBLE_PACK1_RRR_0_OPCODE_X0 = 29,
935   FDOUBLE_PACK2_RRR_0_OPCODE_X0 = 30,
936   FDOUBLE_SUB_FLAGS_RRR_0_OPCODE_X0 = 31,
937   FDOUBLE_UNPACK_MAX_RRR_0_OPCODE_X0 = 32,
938   FDOUBLE_UNPACK_MIN_RRR_0_OPCODE_X0 = 33,
939   FETCHADD4_RRR_0_OPCODE_X1 = 18,
940   FETCHADDGEZ4_RRR_0_OPCODE_X1 = 19,
941   FETCHADDGEZ_RRR_0_OPCODE_X1 = 20,
942   FETCHADD_RRR_0_OPCODE_X1 = 21,
943   FETCHAND4_RRR_0_OPCODE_X1 = 22,
944   FETCHAND_RRR_0_OPCODE_X1 = 23,
945   FETCHOR4_RRR_0_OPCODE_X1 = 24,
946   FETCHOR_RRR_0_OPCODE_X1 = 25,
947   FINV_UNARY_OPCODE_X1 = 3,
948   FLUSHWB_UNARY_OPCODE_X1 = 4,
949   FLUSH_UNARY_OPCODE_X1 = 5,
950   FNOP_UNARY_OPCODE_X0 = 3,
951   FNOP_UNARY_OPCODE_X1 = 6,
952   FNOP_UNARY_OPCODE_Y0 = 3,
953   FNOP_UNARY_OPCODE_Y1 = 8,
954   FSINGLE_ADD1_RRR_0_OPCODE_X0 = 34,
955   FSINGLE_ADDSUB2_RRR_0_OPCODE_X0 = 35,
956   FSINGLE_MUL1_RRR_0_OPCODE_X0 = 36,
957   FSINGLE_MUL2_RRR_0_OPCODE_X0 = 37,
958   FSINGLE_PACK1_UNARY_OPCODE_X0 = 4,
959   FSINGLE_PACK1_UNARY_OPCODE_Y0 = 4,
960   FSINGLE_PACK2_RRR_0_OPCODE_X0 = 38,
961   FSINGLE_SUB1_RRR_0_OPCODE_X0 = 39,
962   ICOH_UNARY_OPCODE_X1 = 7,
963   ILL_UNARY_OPCODE_X1 = 8,
964   ILL_UNARY_OPCODE_Y1 = 9,
965   IMM8_OPCODE_X0 = 4,
966   IMM8_OPCODE_X1 = 3,
967   INV_UNARY_OPCODE_X1 = 9,
968   IRET_UNARY_OPCODE_X1 = 10,
969   JALRP_UNARY_OPCODE_X1 = 11,
970   JALRP_UNARY_OPCODE_Y1 = 10,
971   JALR_UNARY_OPCODE_X1 = 12,
972   JALR_UNARY_OPCODE_Y1 = 11,
973   JAL_JUMP_OPCODE_X1 = 0,
974   JRP_UNARY_OPCODE_X1 = 13,
975   JRP_UNARY_OPCODE_Y1 = 12,
976   JR_UNARY_OPCODE_X1 = 14,
977   JR_UNARY_OPCODE_Y1 = 13,
978   JUMP_OPCODE_X1 = 4,
979   J_JUMP_OPCODE_X1 = 1,
980   LD1S_ADD_IMM8_OPCODE_X1 = 7,
981   LD1S_OPCODE_Y2 = 0,
982   LD1S_UNARY_OPCODE_X1 = 15,
983   LD1U_ADD_IMM8_OPCODE_X1 = 8,
984   LD1U_OPCODE_Y2 = 1,
985   LD1U_UNARY_OPCODE_X1 = 16,
986   LD2S_ADD_IMM8_OPCODE_X1 = 9,
987   LD2S_OPCODE_Y2 = 2,
988   LD2S_UNARY_OPCODE_X1 = 17,
989   LD2U_ADD_IMM8_OPCODE_X1 = 10,
990   LD2U_OPCODE_Y2 = 3,
991   LD2U_UNARY_OPCODE_X1 = 18,
992   LD4S_ADD_IMM8_OPCODE_X1 = 11,
993   LD4S_OPCODE_Y2 = 1,
994   LD4S_UNARY_OPCODE_X1 = 19,
995   LD4U_ADD_IMM8_OPCODE_X1 = 12,
996   LD4U_OPCODE_Y2 = 2,
997   LD4U_UNARY_OPCODE_X1 = 20,
998   LDNA_ADD_IMM8_OPCODE_X1 = 21,
999   LDNA_UNARY_OPCODE_X1 = 21,
1000   LDNT1S_ADD_IMM8_OPCODE_X1 = 13,
1001   LDNT1S_UNARY_OPCODE_X1 = 22,
1002   LDNT1U_ADD_IMM8_OPCODE_X1 = 14,
1003   LDNT1U_UNARY_OPCODE_X1 = 23,
1004   LDNT2S_ADD_IMM8_OPCODE_X1 = 15,
1005   LDNT2S_UNARY_OPCODE_X1 = 24,
1006   LDNT2U_ADD_IMM8_OPCODE_X1 = 16,
1007   LDNT2U_UNARY_OPCODE_X1 = 25,
1008   LDNT4S_ADD_IMM8_OPCODE_X1 = 17,
1009   LDNT4S_UNARY_OPCODE_X1 = 26,
1010   LDNT4U_ADD_IMM8_OPCODE_X1 = 18,
1011   LDNT4U_UNARY_OPCODE_X1 = 27,
1012   LDNT_ADD_IMM8_OPCODE_X1 = 19,
1013   LDNT_UNARY_OPCODE_X1 = 28,
1014   LD_ADD_IMM8_OPCODE_X1 = 20,
1015   LD_OPCODE_Y2 = 3,
1016   LD_UNARY_OPCODE_X1 = 29,
1017   LNK_UNARY_OPCODE_X1 = 30,
1018   LNK_UNARY_OPCODE_Y1 = 14,
1019   MFSPR_IMM8_OPCODE_X1 = 22,
1020   MF_UNARY_OPCODE_X1 = 31,
1021   MM_BF_OPCODE_X0 = 7,
1022   MNZ_RRR_0_OPCODE_X0 = 40,
1023   MNZ_RRR_0_OPCODE_X1 = 26,
1024   MNZ_RRR_4_OPCODE_Y0 = 2,
1025   MNZ_RRR_4_OPCODE_Y1 = 2,
1026   MODE_OPCODE_YA2 = 1,
1027   MODE_OPCODE_YB2 = 2,
1028   MODE_OPCODE_YC2 = 3,
1029   MTSPR_IMM8_OPCODE_X1 = 23,
1030   MULAX_RRR_0_OPCODE_X0 = 41,
1031   MULAX_RRR_3_OPCODE_Y0 = 2,
1032   MULA_HS_HS_RRR_0_OPCODE_X0 = 42,
1033   MULA_HS_HS_RRR_9_OPCODE_Y0 = 0,
1034   MULA_HS_HU_RRR_0_OPCODE_X0 = 43,
1035   MULA_HS_LS_RRR_0_OPCODE_X0 = 44,
1036   MULA_HS_LU_RRR_0_OPCODE_X0 = 45,
1037   MULA_HU_HU_RRR_0_OPCODE_X0 = 46,
1038   MULA_HU_HU_RRR_9_OPCODE_Y0 = 1,
1039   MULA_HU_LS_RRR_0_OPCODE_X0 = 47,
1040   MULA_HU_LU_RRR_0_OPCODE_X0 = 48,
1041   MULA_LS_LS_RRR_0_OPCODE_X0 = 49,
1042   MULA_LS_LS_RRR_9_OPCODE_Y0 = 2,
1043   MULA_LS_LU_RRR_0_OPCODE_X0 = 50,
1044   MULA_LU_LU_RRR_0_OPCODE_X0 = 51,
1045   MULA_LU_LU_RRR_9_OPCODE_Y0 = 3,
1046   MULX_RRR_0_OPCODE_X0 = 52,
1047   MULX_RRR_3_OPCODE_Y0 = 3,
1048   MUL_HS_HS_RRR_0_OPCODE_X0 = 53,
1049   MUL_HS_HS_RRR_8_OPCODE_Y0 = 0,
1050   MUL_HS_HU_RRR_0_OPCODE_X0 = 54,
1051   MUL_HS_LS_RRR_0_OPCODE_X0 = 55,
1052   MUL_HS_LU_RRR_0_OPCODE_X0 = 56,
1053   MUL_HU_HU_RRR_0_OPCODE_X0 = 57,
1054   MUL_HU_HU_RRR_8_OPCODE_Y0 = 1,
1055   MUL_HU_LS_RRR_0_OPCODE_X0 = 58,
1056   MUL_HU_LU_RRR_0_OPCODE_X0 = 59,
1057   MUL_LS_LS_RRR_0_OPCODE_X0 = 60,
1058   MUL_LS_LS_RRR_8_OPCODE_Y0 = 2,
1059   MUL_LS_LU_RRR_0_OPCODE_X0 = 61,
1060   MUL_LU_LU_RRR_0_OPCODE_X0 = 62,
1061   MUL_LU_LU_RRR_8_OPCODE_Y0 = 3,
1062   MZ_RRR_0_OPCODE_X0 = 63,
1063   MZ_RRR_0_OPCODE_X1 = 27,
1064   MZ_RRR_4_OPCODE_Y0 = 3,
1065   MZ_RRR_4_OPCODE_Y1 = 3,
1066   NAP_UNARY_OPCODE_X1 = 32,
1067   NOP_UNARY_OPCODE_X0 = 5,
1068   NOP_UNARY_OPCODE_X1 = 33,
1069   NOP_UNARY_OPCODE_Y0 = 5,
1070   NOP_UNARY_OPCODE_Y1 = 15,
1071   NOR_RRR_0_OPCODE_X0 = 64,
1072   NOR_RRR_0_OPCODE_X1 = 28,
1073   NOR_RRR_5_OPCODE_Y0 = 1,
1074   NOR_RRR_5_OPCODE_Y1 = 1,
1075   ORI_IMM8_OPCODE_X0 = 7,
1076   ORI_IMM8_OPCODE_X1 = 24,
1077   OR_RRR_0_OPCODE_X0 = 65,
1078   OR_RRR_0_OPCODE_X1 = 29,
1079   OR_RRR_5_OPCODE_Y0 = 2,
1080   OR_RRR_5_OPCODE_Y1 = 2,
1081   PCNT_UNARY_OPCODE_X0 = 6,
1082   PCNT_UNARY_OPCODE_Y0 = 6,
1083   REVBITS_UNARY_OPCODE_X0 = 7,
1084   REVBITS_UNARY_OPCODE_Y0 = 7,
1085   REVBYTES_UNARY_OPCODE_X0 = 8,
1086   REVBYTES_UNARY_OPCODE_Y0 = 8,
1087   ROTLI_SHIFT_OPCODE_X0 = 1,
1088   ROTLI_SHIFT_OPCODE_X1 = 1,
1089   ROTLI_SHIFT_OPCODE_Y0 = 0,
1090   ROTLI_SHIFT_OPCODE_Y1 = 0,
1091   ROTL_RRR_0_OPCODE_X0 = 66,
1092   ROTL_RRR_0_OPCODE_X1 = 30,
1093   ROTL_RRR_6_OPCODE_Y0 = 0,
1094   ROTL_RRR_6_OPCODE_Y1 = 0,
1095   RRR_0_OPCODE_X0 = 5,
1096   RRR_0_OPCODE_X1 = 5,
1097   RRR_0_OPCODE_Y0 = 5,
1098   RRR_0_OPCODE_Y1 = 6,
1099   RRR_1_OPCODE_Y0 = 6,
1100   RRR_1_OPCODE_Y1 = 7,
1101   RRR_2_OPCODE_Y0 = 7,
1102   RRR_2_OPCODE_Y1 = 8,
1103   RRR_3_OPCODE_Y0 = 8,
1104   RRR_3_OPCODE_Y1 = 9,
1105   RRR_4_OPCODE_Y0 = 9,
1106   RRR_4_OPCODE_Y1 = 10,
1107   RRR_5_OPCODE_Y0 = 10,
1108   RRR_5_OPCODE_Y1 = 11,
1109   RRR_6_OPCODE_Y0 = 11,
1110   RRR_6_OPCODE_Y1 = 12,
1111   RRR_7_OPCODE_Y0 = 12,
1112   RRR_7_OPCODE_Y1 = 13,
1113   RRR_8_OPCODE_Y0 = 13,
1114   RRR_9_OPCODE_Y0 = 14,
1115   SHIFT_OPCODE_X0 = 6,
1116   SHIFT_OPCODE_X1 = 6,
1117   SHIFT_OPCODE_Y0 = 15,
1118   SHIFT_OPCODE_Y1 = 14,
1119   SHL16INSLI_OPCODE_X0 = 7,
1120   SHL16INSLI_OPCODE_X1 = 7,
1121   SHL1ADDX_RRR_0_OPCODE_X0 = 67,
1122   SHL1ADDX_RRR_0_OPCODE_X1 = 31,
1123   SHL1ADDX_RRR_7_OPCODE_Y0 = 1,
1124   SHL1ADDX_RRR_7_OPCODE_Y1 = 1,
1125   SHL1ADD_RRR_0_OPCODE_X0 = 68,
1126   SHL1ADD_RRR_0_OPCODE_X1 = 32,
1127   SHL1ADD_RRR_1_OPCODE_Y0 = 0,
1128   SHL1ADD_RRR_1_OPCODE_Y1 = 0,
1129   SHL2ADDX_RRR_0_OPCODE_X0 = 69,
1130   SHL2ADDX_RRR_0_OPCODE_X1 = 33,
1131   SHL2ADDX_RRR_7_OPCODE_Y0 = 2,
1132   SHL2ADDX_RRR_7_OPCODE_Y1 = 2,
1133   SHL2ADD_RRR_0_OPCODE_X0 = 70,
1134   SHL2ADD_RRR_0_OPCODE_X1 = 34,
1135   SHL2ADD_RRR_1_OPCODE_Y0 = 1,
1136   SHL2ADD_RRR_1_OPCODE_Y1 = 1,
1137   SHL3ADDX_RRR_0_OPCODE_X0 = 71,
1138   SHL3ADDX_RRR_0_OPCODE_X1 = 35,
1139   SHL3ADDX_RRR_7_OPCODE_Y0 = 3,
1140   SHL3ADDX_RRR_7_OPCODE_Y1 = 3,
1141   SHL3ADD_RRR_0_OPCODE_X0 = 72,
1142   SHL3ADD_RRR_0_OPCODE_X1 = 36,
1143   SHL3ADD_RRR_1_OPCODE_Y0 = 2,
1144   SHL3ADD_RRR_1_OPCODE_Y1 = 2,
1145   SHLI_SHIFT_OPCODE_X0 = 2,
1146   SHLI_SHIFT_OPCODE_X1 = 2,
1147   SHLI_SHIFT_OPCODE_Y0 = 1,
1148   SHLI_SHIFT_OPCODE_Y1 = 1,
1149   SHLXI_SHIFT_OPCODE_X0 = 3,
1150   SHLXI_SHIFT_OPCODE_X1 = 3,
1151   SHLX_RRR_0_OPCODE_X0 = 73,
1152   SHLX_RRR_0_OPCODE_X1 = 37,
1153   SHL_RRR_0_OPCODE_X0 = 74,
1154   SHL_RRR_0_OPCODE_X1 = 38,
1155   SHL_RRR_6_OPCODE_Y0 = 1,
1156   SHL_RRR_6_OPCODE_Y1 = 1,
1157   SHRSI_SHIFT_OPCODE_X0 = 4,
1158   SHRSI_SHIFT_OPCODE_X1 = 4,
1159   SHRSI_SHIFT_OPCODE_Y0 = 2,
1160   SHRSI_SHIFT_OPCODE_Y1 = 2,
1161   SHRS_RRR_0_OPCODE_X0 = 75,
1162   SHRS_RRR_0_OPCODE_X1 = 39,
1163   SHRS_RRR_6_OPCODE_Y0 = 2,
1164   SHRS_RRR_6_OPCODE_Y1 = 2,
1165   SHRUI_SHIFT_OPCODE_X0 = 5,
1166   SHRUI_SHIFT_OPCODE_X1 = 5,
1167   SHRUI_SHIFT_OPCODE_Y0 = 3,
1168   SHRUI_SHIFT_OPCODE_Y1 = 3,
1169   SHRUXI_SHIFT_OPCODE_X0 = 6,
1170   SHRUXI_SHIFT_OPCODE_X1 = 6,
1171   SHRUX_RRR_0_OPCODE_X0 = 76,
1172   SHRUX_RRR_0_OPCODE_X1 = 40,
1173   SHRU_RRR_0_OPCODE_X0 = 77,
1174   SHRU_RRR_0_OPCODE_X1 = 41,
1175   SHRU_RRR_6_OPCODE_Y0 = 3,
1176   SHRU_RRR_6_OPCODE_Y1 = 3,
1177   SHUFFLEBYTES_RRR_0_OPCODE_X0 = 78,
1178   ST1_ADD_IMM8_OPCODE_X1 = 25,
1179   ST1_OPCODE_Y2 = 0,
1180   ST1_RRR_0_OPCODE_X1 = 42,
1181   ST2_ADD_IMM8_OPCODE_X1 = 26,
1182   ST2_OPCODE_Y2 = 1,
1183   ST2_RRR_0_OPCODE_X1 = 43,
1184   ST4_ADD_IMM8_OPCODE_X1 = 27,
1185   ST4_OPCODE_Y2 = 2,
1186   ST4_RRR_0_OPCODE_X1 = 44,
1187   STNT1_ADD_IMM8_OPCODE_X1 = 28,
1188   STNT1_RRR_0_OPCODE_X1 = 45,
1189   STNT2_ADD_IMM8_OPCODE_X1 = 29,
1190   STNT2_RRR_0_OPCODE_X1 = 46,
1191   STNT4_ADD_IMM8_OPCODE_X1 = 30,
1192   STNT4_RRR_0_OPCODE_X1 = 47,
1193   STNT_ADD_IMM8_OPCODE_X1 = 31,
1194   STNT_RRR_0_OPCODE_X1 = 48,
1195   ST_ADD_IMM8_OPCODE_X1 = 32,
1196   ST_OPCODE_Y2 = 3,
1197   ST_RRR_0_OPCODE_X1 = 49,
1198   SUBXSC_RRR_0_OPCODE_X0 = 79,
1199   SUBXSC_RRR_0_OPCODE_X1 = 50,
1200   SUBX_RRR_0_OPCODE_X0 = 80,
1201   SUBX_RRR_0_OPCODE_X1 = 51,
1202   SUBX_RRR_0_OPCODE_Y0 = 2,
1203   SUBX_RRR_0_OPCODE_Y1 = 2,
1204   SUB_RRR_0_OPCODE_X0 = 81,
1205   SUB_RRR_0_OPCODE_X1 = 52,
1206   SUB_RRR_0_OPCODE_Y0 = 3,
1207   SUB_RRR_0_OPCODE_Y1 = 3,
1208   SWINT0_UNARY_OPCODE_X1 = 34,
1209   SWINT1_UNARY_OPCODE_X1 = 35,
1210   SWINT2_UNARY_OPCODE_X1 = 36,
1211   SWINT3_UNARY_OPCODE_X1 = 37,
1212   TBLIDXB0_UNARY_OPCODE_X0 = 9,
1213   TBLIDXB0_UNARY_OPCODE_Y0 = 9,
1214   TBLIDXB1_UNARY_OPCODE_X0 = 10,
1215   TBLIDXB1_UNARY_OPCODE_Y0 = 10,
1216   TBLIDXB2_UNARY_OPCODE_X0 = 11,
1217   TBLIDXB2_UNARY_OPCODE_Y0 = 11,
1218   TBLIDXB3_UNARY_OPCODE_X0 = 12,
1219   TBLIDXB3_UNARY_OPCODE_Y0 = 12,
1220   UNARY_RRR_0_OPCODE_X0 = 82,
1221   UNARY_RRR_0_OPCODE_X1 = 53,
1222   UNARY_RRR_1_OPCODE_Y0 = 3,
1223   UNARY_RRR_1_OPCODE_Y1 = 3,
1224   V1ADDI_IMM8_OPCODE_X0 = 8,
1225   V1ADDI_IMM8_OPCODE_X1 = 33,
1226   V1ADDUC_RRR_0_OPCODE_X0 = 83,
1227   V1ADDUC_RRR_0_OPCODE_X1 = 54,
1228   V1ADD_RRR_0_OPCODE_X0 = 84,
1229   V1ADD_RRR_0_OPCODE_X1 = 55,
1230   V1ADIFFU_RRR_0_OPCODE_X0 = 85,
1231   V1AVGU_RRR_0_OPCODE_X0 = 86,
1232   V1CMPEQI_IMM8_OPCODE_X0 = 9,
1233   V1CMPEQI_IMM8_OPCODE_X1 = 34,
1234   V1CMPEQ_RRR_0_OPCODE_X0 = 87,
1235   V1CMPEQ_RRR_0_OPCODE_X1 = 56,
1236   V1CMPLES_RRR_0_OPCODE_X0 = 88,
1237   V1CMPLES_RRR_0_OPCODE_X1 = 57,
1238   V1CMPLEU_RRR_0_OPCODE_X0 = 89,
1239   V1CMPLEU_RRR_0_OPCODE_X1 = 58,
1240   V1CMPLTSI_IMM8_OPCODE_X0 = 10,
1241   V1CMPLTSI_IMM8_OPCODE_X1 = 35,
1242   V1CMPLTS_RRR_0_OPCODE_X0 = 90,
1243   V1CMPLTS_RRR_0_OPCODE_X1 = 59,
1244   V1CMPLTUI_IMM8_OPCODE_X0 = 11,
1245   V1CMPLTUI_IMM8_OPCODE_X1 = 36,
1246   V1CMPLTU_RRR_0_OPCODE_X0 = 91,
1247   V1CMPLTU_RRR_0_OPCODE_X1 = 60,
1248   V1CMPNE_RRR_0_OPCODE_X0 = 92,
1249   V1CMPNE_RRR_0_OPCODE_X1 = 61,
1250   V1DDOTPUA_RRR_0_OPCODE_X0 = 161,
1251   V1DDOTPUSA_RRR_0_OPCODE_X0 = 93,
1252   V1DDOTPUS_RRR_0_OPCODE_X0 = 94,
1253   V1DDOTPU_RRR_0_OPCODE_X0 = 162,
1254   V1DOTPA_RRR_0_OPCODE_X0 = 95,
1255   V1DOTPUA_RRR_0_OPCODE_X0 = 163,
1256   V1DOTPUSA_RRR_0_OPCODE_X0 = 96,
1257   V1DOTPUS_RRR_0_OPCODE_X0 = 97,
1258   V1DOTPU_RRR_0_OPCODE_X0 = 164,
1259   V1DOTP_RRR_0_OPCODE_X0 = 98,
1260   V1INT_H_RRR_0_OPCODE_X0 = 99,
1261   V1INT_H_RRR_0_OPCODE_X1 = 62,
1262   V1INT_L_RRR_0_OPCODE_X0 = 100,
1263   V1INT_L_RRR_0_OPCODE_X1 = 63,
1264   V1MAXUI_IMM8_OPCODE_X0 = 12,
1265   V1MAXUI_IMM8_OPCODE_X1 = 37,
1266   V1MAXU_RRR_0_OPCODE_X0 = 101,
1267   V1MAXU_RRR_0_OPCODE_X1 = 64,
1268   V1MINUI_IMM8_OPCODE_X0 = 13,
1269   V1MINUI_IMM8_OPCODE_X1 = 38,
1270   V1MINU_RRR_0_OPCODE_X0 = 102,
1271   V1MINU_RRR_0_OPCODE_X1 = 65,
1272   V1MNZ_RRR_0_OPCODE_X0 = 103,
1273   V1MNZ_RRR_0_OPCODE_X1 = 66,
1274   V1MULTU_RRR_0_OPCODE_X0 = 104,
1275   V1MULUS_RRR_0_OPCODE_X0 = 105,
1276   V1MULU_RRR_0_OPCODE_X0 = 106,
1277   V1MZ_RRR_0_OPCODE_X0 = 107,
1278   V1MZ_RRR_0_OPCODE_X1 = 67,
1279   V1SADAU_RRR_0_OPCODE_X0 = 108,
1280   V1SADU_RRR_0_OPCODE_X0 = 109,
1281   V1SHLI_SHIFT_OPCODE_X0 = 7,
1282   V1SHLI_SHIFT_OPCODE_X1 = 7,
1283   V1SHL_RRR_0_OPCODE_X0 = 110,
1284   V1SHL_RRR_0_OPCODE_X1 = 68,
1285   V1SHRSI_SHIFT_OPCODE_X0 = 8,
1286   V1SHRSI_SHIFT_OPCODE_X1 = 8,
1287   V1SHRS_RRR_0_OPCODE_X0 = 111,
1288   V1SHRS_RRR_0_OPCODE_X1 = 69,
1289   V1SHRUI_SHIFT_OPCODE_X0 = 9,
1290   V1SHRUI_SHIFT_OPCODE_X1 = 9,
1291   V1SHRU_RRR_0_OPCODE_X0 = 112,
1292   V1SHRU_RRR_0_OPCODE_X1 = 70,
1293   V1SUBUC_RRR_0_OPCODE_X0 = 113,
1294   V1SUBUC_RRR_0_OPCODE_X1 = 71,
1295   V1SUB_RRR_0_OPCODE_X0 = 114,
1296   V1SUB_RRR_0_OPCODE_X1 = 72,
1297   V2ADDI_IMM8_OPCODE_X0 = 14,
1298   V2ADDI_IMM8_OPCODE_X1 = 39,
1299   V2ADDSC_RRR_0_OPCODE_X0 = 115,
1300   V2ADDSC_RRR_0_OPCODE_X1 = 73,
1301   V2ADD_RRR_0_OPCODE_X0 = 116,
1302   V2ADD_RRR_0_OPCODE_X1 = 74,
1303   V2ADIFFS_RRR_0_OPCODE_X0 = 117,
1304   V2AVGS_RRR_0_OPCODE_X0 = 118,
1305   V2CMPEQI_IMM8_OPCODE_X0 = 15,
1306   V2CMPEQI_IMM8_OPCODE_X1 = 40,
1307   V2CMPEQ_RRR_0_OPCODE_X0 = 119,
1308   V2CMPEQ_RRR_0_OPCODE_X1 = 75,
1309   V2CMPLES_RRR_0_OPCODE_X0 = 120,
1310   V2CMPLES_RRR_0_OPCODE_X1 = 76,
1311   V2CMPLEU_RRR_0_OPCODE_X0 = 121,
1312   V2CMPLEU_RRR_0_OPCODE_X1 = 77,
1313   V2CMPLTSI_IMM8_OPCODE_X0 = 16,
1314   V2CMPLTSI_IMM8_OPCODE_X1 = 41,
1315   V2CMPLTS_RRR_0_OPCODE_X0 = 122,
1316   V2CMPLTS_RRR_0_OPCODE_X1 = 78,
1317   V2CMPLTUI_IMM8_OPCODE_X0 = 17,
1318   V2CMPLTUI_IMM8_OPCODE_X1 = 42,
1319   V2CMPLTU_RRR_0_OPCODE_X0 = 123,
1320   V2CMPLTU_RRR_0_OPCODE_X1 = 79,
1321   V2CMPNE_RRR_0_OPCODE_X0 = 124,
1322   V2CMPNE_RRR_0_OPCODE_X1 = 80,
1323   V2DOTPA_RRR_0_OPCODE_X0 = 125,
1324   V2DOTP_RRR_0_OPCODE_X0 = 126,
1325   V2INT_H_RRR_0_OPCODE_X0 = 127,
1326   V2INT_H_RRR_0_OPCODE_X1 = 81,
1327   V2INT_L_RRR_0_OPCODE_X0 = 128,
1328   V2INT_L_RRR_0_OPCODE_X1 = 82,
1329   V2MAXSI_IMM8_OPCODE_X0 = 18,
1330   V2MAXSI_IMM8_OPCODE_X1 = 43,
1331   V2MAXS_RRR_0_OPCODE_X0 = 129,
1332   V2MAXS_RRR_0_OPCODE_X1 = 83,
1333   V2MINSI_IMM8_OPCODE_X0 = 19,
1334   V2MINSI_IMM8_OPCODE_X1 = 44,
1335   V2MINS_RRR_0_OPCODE_X0 = 130,
1336   V2MINS_RRR_0_OPCODE_X1 = 84,
1337   V2MNZ_RRR_0_OPCODE_X0 = 131,
1338   V2MNZ_RRR_0_OPCODE_X1 = 85,
1339   V2MULFSC_RRR_0_OPCODE_X0 = 132,
1340   V2MULS_RRR_0_OPCODE_X0 = 133,
1341   V2MULTS_RRR_0_OPCODE_X0 = 134,
1342   V2MZ_RRR_0_OPCODE_X0 = 135,
1343   V2MZ_RRR_0_OPCODE_X1 = 86,
1344   V2PACKH_RRR_0_OPCODE_X0 = 136,
1345   V2PACKH_RRR_0_OPCODE_X1 = 87,
1346   V2PACKL_RRR_0_OPCODE_X0 = 137,
1347   V2PACKL_RRR_0_OPCODE_X1 = 88,
1348   V2PACKUC_RRR_0_OPCODE_X0 = 138,
1349   V2PACKUC_RRR_0_OPCODE_X1 = 89,
1350   V2SADAS_RRR_0_OPCODE_X0 = 139,
1351   V2SADAU_RRR_0_OPCODE_X0 = 140,
1352   V2SADS_RRR_0_OPCODE_X0 = 141,
1353   V2SADU_RRR_0_OPCODE_X0 = 142,
1354   V2SHLI_SHIFT_OPCODE_X0 = 10,
1355   V2SHLI_SHIFT_OPCODE_X1 = 10,
1356   V2SHLSC_RRR_0_OPCODE_X0 = 143,
1357   V2SHLSC_RRR_0_OPCODE_X1 = 90,
1358   V2SHL_RRR_0_OPCODE_X0 = 144,
1359   V2SHL_RRR_0_OPCODE_X1 = 91,
1360   V2SHRSI_SHIFT_OPCODE_X0 = 11,
1361   V2SHRSI_SHIFT_OPCODE_X1 = 11,
1362   V2SHRS_RRR_0_OPCODE_X0 = 145,
1363   V2SHRS_RRR_0_OPCODE_X1 = 92,
1364   V2SHRUI_SHIFT_OPCODE_X0 = 12,
1365   V2SHRUI_SHIFT_OPCODE_X1 = 12,
1366   V2SHRU_RRR_0_OPCODE_X0 = 146,
1367   V2SHRU_RRR_0_OPCODE_X1 = 93,
1368   V2SUBSC_RRR_0_OPCODE_X0 = 147,
1369   V2SUBSC_RRR_0_OPCODE_X1 = 94,
1370   V2SUB_RRR_0_OPCODE_X0 = 148,
1371   V2SUB_RRR_0_OPCODE_X1 = 95,
1372   V4ADDSC_RRR_0_OPCODE_X0 = 149,
1373   V4ADDSC_RRR_0_OPCODE_X1 = 96,
1374   V4ADD_RRR_0_OPCODE_X0 = 150,
1375   V4ADD_RRR_0_OPCODE_X1 = 97,
1376   V4INT_H_RRR_0_OPCODE_X0 = 151,
1377   V4INT_H_RRR_0_OPCODE_X1 = 98,
1378   V4INT_L_RRR_0_OPCODE_X0 = 152,
1379   V4INT_L_RRR_0_OPCODE_X1 = 99,
1380   V4PACKSC_RRR_0_OPCODE_X0 = 153,
1381   V4PACKSC_RRR_0_OPCODE_X1 = 100,
1382   V4SHLSC_RRR_0_OPCODE_X0 = 154,
1383   V4SHLSC_RRR_0_OPCODE_X1 = 101,
1384   V4SHL_RRR_0_OPCODE_X0 = 155,
1385   V4SHL_RRR_0_OPCODE_X1 = 102,
1386   V4SHRS_RRR_0_OPCODE_X0 = 156,
1387   V4SHRS_RRR_0_OPCODE_X1 = 103,
1388   V4SHRU_RRR_0_OPCODE_X0 = 157,
1389   V4SHRU_RRR_0_OPCODE_X1 = 104,
1390   V4SUBSC_RRR_0_OPCODE_X0 = 158,
1391   V4SUBSC_RRR_0_OPCODE_X1 = 105,
1392   V4SUB_RRR_0_OPCODE_X0 = 159,
1393   V4SUB_RRR_0_OPCODE_X1 = 106,
1394   WH64_UNARY_OPCODE_X1 = 38,
1395   XORI_IMM8_OPCODE_X0 = 20,
1396   XORI_IMM8_OPCODE_X1 = 45,
1397   XOR_RRR_0_OPCODE_X0 = 160,
1398   XOR_RRR_0_OPCODE_X1 = 107,
1399   XOR_RRR_5_OPCODE_Y0 = 3,
1400   XOR_RRR_5_OPCODE_Y1 = 3
1401 };
1402 
1403 
1404 #endif /* __ASSEMBLER__ */
1405 
1406 #endif /* __ARCH_OPCODE_H__ */
1407