
/*---------------------------------------------------------------*/
/*--- begin                                 host_amd64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2004-2010 OpenWorks LLP
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.

   Neither the names of the U.S. Department of Energy nor the
   University of California nor the names of its contributors may be
   used to endorse or promote products derived from this software
   without prior written permission.
*/

#ifndef __VEX_HOST_AMD64_DEFS_H
#define __VEX_HOST_AMD64_DEFS_H


/* --------- Registers. --------- */

/* The usual HReg abstraction.  There are 16 real int regs, 6 real
   float regs, and 16 real vector regs.
*/

extern void ppHRegAMD64 ( HReg );

extern HReg hregAMD64_RAX ( void );
extern HReg hregAMD64_RBX ( void );
extern HReg hregAMD64_RCX ( void );
extern HReg hregAMD64_RDX ( void );
extern HReg hregAMD64_RSP ( void );
extern HReg hregAMD64_RBP ( void );
extern HReg hregAMD64_RSI ( void );
extern HReg hregAMD64_RDI ( void );
extern HReg hregAMD64_R8  ( void );
extern HReg hregAMD64_R9  ( void );
extern HReg hregAMD64_R10 ( void );
extern HReg hregAMD64_R11 ( void );
extern HReg hregAMD64_R12 ( void );
extern HReg hregAMD64_R13 ( void );
extern HReg hregAMD64_R14 ( void );
extern HReg hregAMD64_R15 ( void );

extern HReg hregAMD64_FAKE0 ( void );
extern HReg hregAMD64_FAKE1 ( void );
extern HReg hregAMD64_FAKE2 ( void );
extern HReg hregAMD64_FAKE3 ( void );
extern HReg hregAMD64_FAKE4 ( void );
extern HReg hregAMD64_FAKE5 ( void );

extern HReg hregAMD64_XMM0  ( void );
extern HReg hregAMD64_XMM1  ( void );
extern HReg hregAMD64_XMM2  ( void );
extern HReg hregAMD64_XMM3  ( void );
extern HReg hregAMD64_XMM4  ( void );
extern HReg hregAMD64_XMM5  ( void );
extern HReg hregAMD64_XMM6  ( void );
extern HReg hregAMD64_XMM7  ( void );
extern HReg hregAMD64_XMM8  ( void );
extern HReg hregAMD64_XMM9  ( void );
extern HReg hregAMD64_XMM10 ( void );
extern HReg hregAMD64_XMM11 ( void );
extern HReg hregAMD64_XMM12 ( void );
extern HReg hregAMD64_XMM13 ( void );
extern HReg hregAMD64_XMM14 ( void );
extern HReg hregAMD64_XMM15 ( void );
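
/* Usage sketch (illustrative, not part of the original interface):
   real registers are obtained only through the accessor functions
   above, for example

      HReg rax  = hregAMD64_RAX();
      HReg xmm0 = hregAMD64_XMM0();
      ppHRegAMD64(rax);    pretty-print the register for debugging

   The HReg type itself, and the virtual registers used before
   register allocation, come from the generic VEX register machinery,
   not from this file; the FAKE0..FAKE5 accessors presumably
   correspond to the six "real float regs" mentioned above. */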


/* --------- Condition codes, AMD encoding. --------- */

typedef
   enum {
      Acc_O      = 0,  /* overflow           */
      Acc_NO     = 1,  /* no overflow        */

      Acc_B      = 2,  /* below              */
      Acc_NB     = 3,  /* not below          */

      Acc_Z      = 4,  /* zero               */
      Acc_NZ     = 5,  /* not zero           */

      Acc_BE     = 6,  /* below or equal     */
      Acc_NBE    = 7,  /* not below or equal */

      Acc_S      = 8,  /* negative           */
      Acc_NS     = 9,  /* not negative       */

      Acc_P      = 10, /* parity even        */
      Acc_NP     = 11, /* not parity even    */

      Acc_L      = 12, /* less               */
      Acc_NL     = 13, /* not less           */

      Acc_LE     = 14, /* less or equal      */
      Acc_NLE    = 15, /* not less or equal  */

      Acc_ALWAYS = 16  /* the usual hack     */
   }
   AMD64CondCode;

extern HChar* showAMD64CondCode ( AMD64CondCode );
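
/* Illustrative note (not from the original header): the encoding
   above places each real condition next to its negation (Acc_O/Acc_NO,
   Acc_B/Acc_NB, ...), so a condition in the range Acc_O .. Acc_NLE can
   be inverted by flipping its low bit, e.g.

      AMD64CondCode cc  = Acc_Z;
      AMD64CondCode ncc = 1 ^ cc;    now Acc_NZ

   Acc_ALWAYS (16) is a pseudo-condition with no such pairing. */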


/* --------- Memory address expressions (amodes). --------- */

typedef
   enum {
      Aam_IR,        /* Immediate + Reg */
      Aam_IRRS       /* Immediate + Reg1 + (Reg2 << Shift) */
   }
   AMD64AModeTag;

typedef
   struct {
      AMD64AModeTag tag;
      union {
         struct {
            UInt imm;
            HReg reg;
         } IR;
         struct {
            UInt imm;
            HReg base;
            HReg index;
            Int  shift; /* 0, 1, 2 or 3 only */
         } IRRS;
      } Aam;
   }
   AMD64AMode;

extern AMD64AMode* AMD64AMode_IR   ( UInt, HReg );
extern AMD64AMode* AMD64AMode_IRRS ( UInt, HReg, HReg, Int );

extern AMD64AMode* dopyAMD64AMode ( AMD64AMode* );

extern void ppAMD64AMode ( AMD64AMode* );
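
/* Usage sketch (illustrative, not part of the original interface):
   the two constructors build the two amode forms declared above, e.g.

      AMD64AMode* am1 = AMD64AMode_IR   ( 16, hregAMD64_RBP() );
         16(%rbp), i.e. Immediate + Reg

      AMD64AMode* am2 = AMD64AMode_IRRS ( 8, hregAMD64_RAX(),
                                             hregAMD64_RBX(), 2 );
         8(%rax,%rbx,4), i.e. Immediate + Reg1 + (Reg2 << Shift);
         the shift field is the log2 of the scale, so 2 means index*4.

   dopyAMD64AMode copies an amode, and ppAMD64AMode prints one for
   debugging. */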


/* --------- Operand, which can be reg, immediate or memory. --------- */

typedef
   enum {
      Armi_Imm,
      Armi_Reg,
      Armi_Mem
   }
   AMD64RMITag;

typedef
   struct {
      AMD64RMITag tag;
      union {
         struct {
            UInt imm32;
         } Imm;
         struct {
            HReg reg;
         } Reg;
         struct {
            AMD64AMode* am;
         } Mem;
      }
      Armi;
   }
   AMD64RMI;

extern AMD64RMI* AMD64RMI_Imm ( UInt );
extern AMD64RMI* AMD64RMI_Reg ( HReg );
extern AMD64RMI* AMD64RMI_Mem ( AMD64AMode* );

extern void ppAMD64RMI ( AMD64RMI* );
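
/* Usage sketch (illustrative, not part of the original interface): an
   AMD64RMI is built with exactly one of the constructors above, e.g.

      AMD64RMI* op1 = AMD64RMI_Imm ( 42 );
      AMD64RMI* op2 = AMD64RMI_Reg ( hregAMD64_RDX() );
      AMD64RMI* op3 = AMD64RMI_Mem ( AMD64AMode_IR(0, hregAMD64_RSP()) );

   Consumers switch on the tag field and read the matching member of
   the Armi union. */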


/* --------- Operand, which can be reg or immediate only. --------- */

typedef
   enum {
      Ari_Imm,
      Ari_Reg
   }
   AMD64RITag;

typedef
   struct {
      AMD64RITag tag;
      union {
         struct {
            UInt imm32;
         } Imm;
         struct {
            HReg reg;
         } Reg;
      }
      Ari;
   }
   AMD64RI;

extern AMD64RI* AMD64RI_Imm ( UInt );
extern AMD64RI* AMD64RI_Reg ( HReg );

extern void ppAMD64RI ( AMD64RI* );


/* --------- Operand, which can be reg or memory only. --------- */

typedef
   enum {
      Arm_Reg,
      Arm_Mem
   }
   AMD64RMTag;

typedef
   struct {
      AMD64RMTag tag;
      union {
         struct {
            HReg reg;
         } Reg;
         struct {
            AMD64AMode* am;
         } Mem;
      }
      Arm;
   }
   AMD64RM;

extern AMD64RM* AMD64RM_Reg ( HReg );
extern AMD64RM* AMD64RM_Mem ( AMD64AMode* );

extern void ppAMD64RM ( AMD64RM* );
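
/* Usage sketch (illustrative, not part of the original interface):
   AMD64RI and AMD64RM follow the same tagged-union pattern as
   AMD64RMI, just with fewer variants; they are used where the
   instruction encoding cannot accept all three of reg/imm/mem, e.g.

      AMD64RI* ri = AMD64RI_Imm ( 0x100 );
      AMD64RM* rm = AMD64RM_Mem ( AMD64AMode_IR(8, hregAMD64_RBP()) );
*/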


/* --------- Instructions. --------- */

/* --------- */
typedef
   enum {
      Aun_NEG,
      Aun_NOT
   }
   AMD64UnaryOp;

extern HChar* showAMD64UnaryOp ( AMD64UnaryOp );


/* --------- */
typedef
   enum {
      Aalu_INVALID,
      Aalu_MOV,
      Aalu_CMP,
      Aalu_ADD, Aalu_SUB, Aalu_ADC, Aalu_SBB,
      Aalu_AND, Aalu_OR, Aalu_XOR,
      Aalu_MUL
   }
   AMD64AluOp;

extern HChar* showAMD64AluOp ( AMD64AluOp );


/* --------- */
typedef
   enum {
      Ash_INVALID,
      Ash_SHL, Ash_SHR, Ash_SAR
   }
   AMD64ShiftOp;

extern HChar* showAMD64ShiftOp ( AMD64ShiftOp );


/* --------- */
typedef
   enum {
      Afp_INVALID,
      /* Binary */
      Afp_SCALE, Afp_ATAN, Afp_YL2X, Afp_YL2XP1, Afp_PREM, Afp_PREM1,
      /* Unary */
      Afp_SQRT,
      Afp_SIN, Afp_COS, Afp_TAN,
      Afp_ROUND, Afp_2XM1
   }
   A87FpOp;

extern HChar* showA87FpOp ( A87FpOp );


/* --------- */
typedef
   enum {
      Asse_INVALID,
      /* mov */
      Asse_MOV,
      /* Floating point binary */
      Asse_ADDF, Asse_SUBF, Asse_MULF, Asse_DIVF,
      Asse_MAXF, Asse_MINF,
      Asse_CMPEQF, Asse_CMPLTF, Asse_CMPLEF, Asse_CMPUNF,
      /* Floating point unary */
      Asse_RCPF, Asse_RSQRTF, Asse_SQRTF,
      /* Bitwise */
      Asse_AND, Asse_OR, Asse_XOR, Asse_ANDN,
      Asse_ADD8, Asse_ADD16, Asse_ADD32, Asse_ADD64,
      Asse_QADD8U, Asse_QADD16U,
      Asse_QADD8S, Asse_QADD16S,
      Asse_SUB8, Asse_SUB16, Asse_SUB32, Asse_SUB64,
      Asse_QSUB8U, Asse_QSUB16U,
      Asse_QSUB8S, Asse_QSUB16S,
      Asse_MUL16,
      Asse_MULHI16U,
      Asse_MULHI16S,
      Asse_AVG8U, Asse_AVG16U,
      Asse_MAX16S,
      Asse_MAX8U,
      Asse_MIN16S,
      Asse_MIN8U,
      Asse_CMPEQ8, Asse_CMPEQ16, Asse_CMPEQ32,
      Asse_CMPGT8S, Asse_CMPGT16S, Asse_CMPGT32S,
      Asse_SHL16, Asse_SHL32, Asse_SHL64,
      Asse_SHR16, Asse_SHR32, Asse_SHR64,
      Asse_SAR16, Asse_SAR32,
      Asse_PACKSSD, Asse_PACKSSW, Asse_PACKUSW,
      Asse_UNPCKHB, Asse_UNPCKHW, Asse_UNPCKHD, Asse_UNPCKHQ,
      Asse_UNPCKLB, Asse_UNPCKLW, Asse_UNPCKLD, Asse_UNPCKLQ
   }
   AMD64SseOp;

extern HChar* showAMD64SseOp ( AMD64SseOp );


/* --------- */
typedef
   enum {
      Ain_Imm64,       /* Generate 64-bit literal to register */
      Ain_Alu64R,      /* 64-bit mov/arith/logical, dst=REG */
      Ain_Alu64M,      /* 64-bit mov/arith/logical, dst=MEM */
      Ain_Sh64,        /* 64-bit shift/rotate, dst=REG or MEM */
      Ain_Test64,      /* 64-bit test (AND, set flags, discard result) */
      Ain_Unary64,     /* 64-bit not and neg */
      Ain_Lea64,       /* 64-bit compute EA into a reg */
      Ain_MulL,        /* widening multiply */
      Ain_Div,         /* div and mod */
//..       Xin_Sh3232,    /* shldl or shrdl */
      Ain_Push,        /* push 64-bit value on stack */
      Ain_Call,        /* call to address in register */
      Ain_Goto,        /* conditional/unconditional jmp to dst */
      Ain_CMov64,      /* conditional move */
      Ain_MovxLQ,      /* reg-reg move, zx-ing/sx-ing top half */
      Ain_LoadEX,      /* mov{s,z}{b,w,l}q from mem to reg */
      Ain_Store,       /* store 32/16/8 bit value in memory */
      Ain_Set64,       /* convert condition code to 64-bit value */
      Ain_Bsfr64,      /* 64-bit bsf/bsr */
      Ain_MFence,      /* mem fence */
      Ain_ACAS,        /* 8/16/32/64-bit lock;cmpxchg */
      Ain_DACAS,       /* lock;cmpxchg8b/16b (doubleword ACAS, 2 x
                          32-bit or 2 x 64-bit only) */

      Ain_A87Free,     /* free up x87 registers */
      Ain_A87PushPop,  /* x87 loads/stores */
      Ain_A87FpOp,     /* x87 operations */
      Ain_A87LdCW,     /* load x87 control word */
      Ain_A87StSW,     /* store x87 status word */
//..
//..       Xin_FpUnary,   /* FP fake unary op */
//..       Xin_FpBinary,  /* FP fake binary op */
//..       Xin_FpLdSt,    /* FP fake load/store */
//..       Xin_FpLdStI,   /* FP fake load/store, converting to/from Int */
//..       Xin_Fp64to32,  /* FP round IEEE754 double to IEEE754 single */
//..       Xin_FpCMov,    /* FP fake floating point conditional move */
      Ain_LdMXCSR,     /* load %mxcsr */
//..       Xin_FpStSW_AX, /* fstsw %ax */
      Ain_SseUComIS,   /* ucomisd/ucomiss, then get %rflags into int
                          register */
      Ain_SseSI2SF,    /* scalar 32/64 int to 32/64 float conversion */
      Ain_SseSF2SI,    /* scalar 32/64 float to 32/64 int conversion */
      Ain_SseSDSS,     /* scalar float32 to/from float64 */
//..
//..       Xin_SseConst,  /* Generate restricted SSE literal */
      Ain_SseLdSt,     /* SSE load/store 32/64/128 bits, no alignment
                          constraints, upper 96/64/0 bits arbitrary */
      Ain_SseLdzLO,    /* SSE load low 32/64 bits, zero remainder of reg */
      Ain_Sse32Fx4,    /* SSE binary, 32Fx4 */
      Ain_Sse32FLo,    /* SSE binary, 32F in lowest lane only */
      Ain_Sse64Fx2,    /* SSE binary, 64Fx2 */
      Ain_Sse64FLo,    /* SSE binary, 64F in lowest lane only */
      Ain_SseReRg,     /* SSE binary general reg-reg, Re, Rg */
      Ain_SseCMov,     /* SSE conditional move */
      Ain_SseShuf      /* SSE2 shuffle (pshufd) */
   }
   AMD64InstrTag;

/* Destinations are on the RIGHT (second operand) */

typedef
   struct {
      AMD64InstrTag tag;
      union {
         struct {
            ULong imm64;
            HReg  dst;
         } Imm64;
         struct {
            AMD64AluOp op;
            AMD64RMI*  src;
            HReg       dst;
         } Alu64R;
         struct {
            AMD64AluOp  op;
            AMD64RI*    src;
            AMD64AMode* dst;
         } Alu64M;
         struct {
            AMD64ShiftOp op;
            UInt         src;  /* shift amount, or 0 means %cl */
            HReg         dst;
         } Sh64;
         struct {
            UInt   imm32;
            HReg   dst;
         } Test64;
         /* Not and Neg */
         struct {
            AMD64UnaryOp op;
            HReg         dst;
         } Unary64;
         /* 64-bit compute EA into a reg */
         struct {
            AMD64AMode* am;
            HReg        dst;
         } Lea64;
         /* 64 x 64 -> 128 bit widening multiply: RDX:RAX = RAX *s/u
            r/m64 */
         struct {
            Bool     syned;
            AMD64RM* src;
         } MulL;
         /* amd64 div/idiv instruction.  Modifies RDX and RAX and
            reads src. */
         struct {
            Bool     syned;
            Int      sz; /* 4 or 8 only */
            AMD64RM* src;
         } Div;
//..          /* shld/shrd.  op may only be Xsh_SHL or Xsh_SHR */
//..          struct {
//..             X86ShiftOp op;
//..             UInt       amt;   /* shift amount, or 0 means %cl */
//..             HReg       src;
//..             HReg       dst;
//..          } Sh3232;
         struct {
            AMD64RMI* src;
         } Push;
         /* Pseudo-insn.  Call target (an absolute address), on given
            condition (which could be Acc_ALWAYS). */
         struct {
            AMD64CondCode cond;
            Addr64        target;
            Int           regparms; /* 0 .. 6 */
         } Call;
         /* Pseudo-insn.  Goto dst, on given condition (which could be
            Acc_ALWAYS). */
         struct {
            IRJumpKind    jk;
            AMD64CondCode cond;
            AMD64RI*      dst;
         } Goto;
         /* Mov src to dst on the given condition, which may not
            be the bogus Acc_ALWAYS. */
         struct {
            AMD64CondCode cond;
            AMD64RM*      src;
            HReg          dst;
         } CMov64;
         /* reg-reg move, sx-ing/zx-ing top half */
         struct {
            Bool syned;
            HReg src;
            HReg dst;
         } MovxLQ;
         /* Sign/Zero extending loads.  Dst size is always 64 bits. */
         struct {
            UChar       szSmall; /* only 1, 2 or 4 */
            Bool        syned;
            AMD64AMode* src;
            HReg        dst;
         } LoadEX;
         /* 32/16/8 bit stores. */
         struct {
            UChar       sz; /* only 1, 2 or 4 */
            HReg        src;
            AMD64AMode* dst;
         } Store;
         /* Convert an amd64 condition code to a 64-bit value (0 or 1). */
         struct {
            AMD64CondCode cond;
            HReg          dst;
         } Set64;
         /* 64-bit bsf or bsr. */
         struct {
            Bool isFwds;
            HReg src;
            HReg dst;
         } Bsfr64;
         /* Mem fence.  In short, an insn which flushes all preceding
            loads and stores as much as possible before continuing.
            On AMD64 we emit a real "mfence". */
         struct {
         } MFence;
         struct {
            AMD64AMode* addr;
            UChar       sz; /* 1, 2, 4 or 8 */
         } ACAS;
         struct {
            AMD64AMode* addr;
            UChar       sz; /* 4 or 8 only */
         } DACAS;

         /* --- X87 --- */

         /* A very minimal set of x87 insns, which operate in a purely
            stack-like way, so there is no need to think about x87
            registers. */

         /* Do 'ffree' on %st(7) .. %st(7-nregs) */
         struct {
            Int nregs; /* 1 <= nregs <= 7 */
         } A87Free;

         /* Push a 32- or 64-bit FP value from memory onto the stack,
            or move a value from the stack to memory and remove it
            from the stack. */
         struct {
            AMD64AMode* addr;
            Bool        isPush;
            UChar       szB; /* 4 or 8 */
         } A87PushPop;

         /* Do an operation on the top-of-stack.  This can be unary, in
            which case it is %st0 = OP( %st0 ), or binary: %st0 = OP(
            %st0, %st1 ). */
         struct {
            A87FpOp op;
         } A87FpOp;

         /* Load the FPU control word. */
         struct {
            AMD64AMode* addr;
         } A87LdCW;

         /* Store the FPU status word (fstsw m16) */
         struct {
            AMD64AMode* addr;
         } A87StSW;

         /* --- SSE --- */

         /* Load 32 bits into %mxcsr. */
         struct {
            AMD64AMode* addr;
         }
         LdMXCSR;
//..          /* fstsw %ax */
//..          struct {
//..             /* no fields */
//..          }
//..          FpStSW_AX;
         /* ucomisd/ucomiss, then get %rflags into int register */
         struct {
            UChar   sz;   /* 4 or 8 only */
            HReg    srcL; /* xmm */
            HReg    srcR; /* xmm */
            HReg    dst;  /* int */
         } SseUComIS;
         /* scalar 32/64 int to 32/64 float conversion */
         struct {
            UChar szS; /* 4 or 8 */
            UChar szD; /* 4 or 8 */
            HReg  src; /* i class */
            HReg  dst; /* v class */
         } SseSI2SF;
         /* scalar 32/64 float to 32/64 int conversion */
         struct {
            UChar szS; /* 4 or 8 */
            UChar szD; /* 4 or 8 */
            HReg  src; /* v class */
            HReg  dst; /* i class */
         } SseSF2SI;
         /* scalar float32 to/from float64 */
         struct {
            Bool from64; /* True: 64->32; False: 32->64 */
            HReg src;
            HReg dst;
         } SseSDSS;
//..
//..          /* Simplistic SSE[123] */
//..          struct {
//..             UShort  con;
//..             HReg    dst;
//..          } SseConst;
         struct {
            Bool        isLoad;
            UChar       sz; /* 4, 8 or 16 only */
            HReg        reg;
            AMD64AMode* addr;
         } SseLdSt;
         struct {
            Int         sz; /* 4 or 8 only */
            HReg        reg;
            AMD64AMode* addr;
         } SseLdzLO;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse32Fx4;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse32FLo;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse64Fx2;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse64FLo;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } SseReRg;
         /* Mov src to dst on the given condition, which may not
            be the bogus Acc_ALWAYS. */
         struct {
            AMD64CondCode cond;
            HReg          src;
            HReg          dst;
         } SseCMov;
         struct {
            Int    order; /* 0 <= order <= 0xFF */
            HReg   src;
            HReg   dst;
         } SseShuf;

      } Ain;
   }
   AMD64Instr;

extern AMD64Instr* AMD64Instr_Imm64      ( ULong imm64, HReg dst );
extern AMD64Instr* AMD64Instr_Alu64R     ( AMD64AluOp, AMD64RMI*, HReg );
extern AMD64Instr* AMD64Instr_Alu64M     ( AMD64AluOp, AMD64RI*,  AMD64AMode* );
extern AMD64Instr* AMD64Instr_Unary64    ( AMD64UnaryOp op, HReg dst );
extern AMD64Instr* AMD64Instr_Lea64      ( AMD64AMode* am, HReg dst );
extern AMD64Instr* AMD64Instr_Sh64       ( AMD64ShiftOp, UInt, HReg );
extern AMD64Instr* AMD64Instr_Test64     ( UInt imm32, HReg dst );
extern AMD64Instr* AMD64Instr_MulL       ( Bool syned, AMD64RM* );
extern AMD64Instr* AMD64Instr_Div        ( Bool syned, Int sz, AMD64RM* );
//.. extern AMD64Instr* AMD64Instr_Sh3232    ( AMD64ShiftOp, UInt amt, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_Push       ( AMD64RMI* );
extern AMD64Instr* AMD64Instr_Call       ( AMD64CondCode, Addr64, Int );
extern AMD64Instr* AMD64Instr_Goto       ( IRJumpKind, AMD64CondCode cond, AMD64RI* dst );
extern AMD64Instr* AMD64Instr_CMov64     ( AMD64CondCode, AMD64RM* src, HReg dst );
extern AMD64Instr* AMD64Instr_MovxLQ     ( Bool syned, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_LoadEX     ( UChar szSmall, Bool syned,
                                           AMD64AMode* src, HReg dst );
extern AMD64Instr* AMD64Instr_Store      ( UChar sz, HReg src, AMD64AMode* dst );
extern AMD64Instr* AMD64Instr_Set64      ( AMD64CondCode cond, HReg dst );
extern AMD64Instr* AMD64Instr_Bsfr64     ( Bool isFwds, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_MFence     ( void );
extern AMD64Instr* AMD64Instr_ACAS       ( AMD64AMode* addr, UChar sz );
extern AMD64Instr* AMD64Instr_DACAS      ( AMD64AMode* addr, UChar sz );

extern AMD64Instr* AMD64Instr_A87Free    ( Int nregs );
extern AMD64Instr* AMD64Instr_A87PushPop ( AMD64AMode* addr, Bool isPush, UChar szB );
extern AMD64Instr* AMD64Instr_A87FpOp    ( A87FpOp op );
extern AMD64Instr* AMD64Instr_A87LdCW    ( AMD64AMode* addr );
extern AMD64Instr* AMD64Instr_A87StSW    ( AMD64AMode* addr );
//..
//.. extern AMD64Instr* AMD64Instr_FpUnary   ( AMD64FpOp op, HReg src, HReg dst );
//.. extern AMD64Instr* AMD64Instr_FpBinary  ( AMD64FpOp op, HReg srcL, HReg srcR, HReg dst );
//.. extern AMD64Instr* AMD64Instr_FpLdSt    ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* );
//.. extern AMD64Instr* AMD64Instr_FpLdStI   ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* );
//.. extern AMD64Instr* AMD64Instr_Fp64to32  ( HReg src, HReg dst );
//.. extern AMD64Instr* AMD64Instr_FpCMov    ( AMD64CondCode, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_LdMXCSR    ( AMD64AMode* );
//.. extern AMD64Instr* AMD64Instr_FpStSW_AX ( void );
extern AMD64Instr* AMD64Instr_SseUComIS  ( Int sz, HReg srcL, HReg srcR, HReg dst );
extern AMD64Instr* AMD64Instr_SseSI2SF   ( Int szS, Int szD, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_SseSF2SI   ( Int szS, Int szD, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_SseSDSS    ( Bool from64, HReg src, HReg dst );
//..
//.. extern AMD64Instr* AMD64Instr_SseConst  ( UShort con, HReg dst );
extern AMD64Instr* AMD64Instr_SseLdSt    ( Bool isLoad, Int sz, HReg, AMD64AMode* );
extern AMD64Instr* AMD64Instr_SseLdzLO   ( Int sz, HReg, AMD64AMode* );
extern AMD64Instr* AMD64Instr_Sse32Fx4   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_Sse32FLo   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_Sse64Fx2   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_Sse64FLo   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_SseReRg    ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_SseCMov    ( AMD64CondCode, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_SseShuf    ( Int order, HReg src, HReg dst );
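
/* Usage sketch (illustrative, not part of the original interface): a
   short sequence built with the constructors above, computing
   rax := rbx + 1 and then setting rcx to 1 if the result was zero,
   else 0.  (Real users of this interface would normally supply
   virtual HRegs chosen by the instruction selector rather than the
   fixed registers shown here.)

      AMD64Instr* i1 = AMD64Instr_Alu64R ( Aalu_MOV,
                                           AMD64RMI_Reg(hregAMD64_RBX()),
                                           hregAMD64_RAX() );
      AMD64Instr* i2 = AMD64Instr_Alu64R ( Aalu_ADD,
                                           AMD64RMI_Imm(1),
                                           hregAMD64_RAX() );
      AMD64Instr* i3 = AMD64Instr_Set64  ( Acc_Z, hregAMD64_RCX() );

   Note the "destinations are on the RIGHT" convention: the final HReg
   argument of Alu64R is both a source and the destination. */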


extern void ppAMD64Instr ( AMD64Instr*, Bool );

/* Some functions that insulate the register allocator from details
   of the underlying instruction set. */
extern void         getRegUsage_AMD64Instr ( HRegUsage*, AMD64Instr*, Bool );
extern void         mapRegs_AMD64Instr     ( HRegRemap*, AMD64Instr*, Bool );
extern Bool         isMove_AMD64Instr      ( AMD64Instr*, HReg*, HReg* );
extern Int          emit_AMD64Instr        ( UChar* buf, Int nbuf, AMD64Instr*,
                                             Bool, void* dispatch );

extern void genSpill_AMD64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern void genReload_AMD64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );

extern void         getAllocableRegs_AMD64 ( Int*, HReg** );
extern HInstrArray* iselSB_AMD64           ( IRSB*, VexArch,
                                                    VexArchInfo*,
                                                    VexAbiInfo* );

#endif /* ndef __VEX_HOST_AMD64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   host_amd64_defs.h ---*/
/*---------------------------------------------------------------*/