Lines Matching refs:Ain
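
The matches below appear to come from the AMD64 backend of Valgrind's VEX library (host_amd64_defs.c). The AMD64Instr_* constructors fill the per-instruction Ain union; ppAMD64Instr, getRegUsage_AMD64Instr, mapRegs_AMD64Instr, isMove_AMD64Instr and emit_AMD64Instr then read the same fields to print, analyse, rename and encode each instruction. After each group of matches there is a short sketch of that function's shape, reconstructed from the lines shown here, with any surrounding case labels, helpers and control flow marked as assumptions.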

611 i->Ain.Imm64.imm64 = imm64; in AMD64Instr_Imm64()
612 i->Ain.Imm64.dst = dst; in AMD64Instr_Imm64()
618 i->Ain.Alu64R.op = op; in AMD64Instr_Alu64R()
619 i->Ain.Alu64R.src = src; in AMD64Instr_Alu64R()
620 i->Ain.Alu64R.dst = dst; in AMD64Instr_Alu64R()
626 i->Ain.Alu64M.op = op; in AMD64Instr_Alu64M()
627 i->Ain.Alu64M.src = src; in AMD64Instr_Alu64M()
628 i->Ain.Alu64M.dst = dst; in AMD64Instr_Alu64M()
635 i->Ain.Sh64.op = op; in AMD64Instr_Sh64()
636 i->Ain.Sh64.src = src; in AMD64Instr_Sh64()
637 i->Ain.Sh64.dst = dst; in AMD64Instr_Sh64()
643 i->Ain.Test64.imm32 = imm32; in AMD64Instr_Test64()
644 i->Ain.Test64.dst = dst; in AMD64Instr_Test64()
650 i->Ain.Unary64.op = op; in AMD64Instr_Unary64()
651 i->Ain.Unary64.dst = dst; in AMD64Instr_Unary64()
657 i->Ain.Lea64.am = am; in AMD64Instr_Lea64()
658 i->Ain.Lea64.dst = dst; in AMD64Instr_Lea64()
664 i->Ain.Alu32R.op = op; in AMD64Instr_Alu32R()
665 i->Ain.Alu32R.src = src; in AMD64Instr_Alu32R()
666 i->Ain.Alu32R.dst = dst; in AMD64Instr_Alu32R()
677 i->Ain.MulL.syned = syned; in AMD64Instr_MulL()
678 i->Ain.MulL.src = src; in AMD64Instr_MulL()
684 i->Ain.Div.syned = syned; in AMD64Instr_Div()
685 i->Ain.Div.sz = sz; in AMD64Instr_Div()
686 i->Ain.Div.src = src; in AMD64Instr_Div()
693 i->Ain.Push.src = src; in AMD64Instr_Push()
700 i->Ain.Call.cond = cond; in AMD64Instr_Call()
701 i->Ain.Call.target = target; in AMD64Instr_Call()
702 i->Ain.Call.regparms = regparms; in AMD64Instr_Call()
703 i->Ain.Call.rloc = rloc; in AMD64Instr_Call()
713 i->Ain.XDirect.dstGA = dstGA; in AMD64Instr_XDirect()
714 i->Ain.XDirect.amRIP = amRIP; in AMD64Instr_XDirect()
715 i->Ain.XDirect.cond = cond; in AMD64Instr_XDirect()
716 i->Ain.XDirect.toFastEP = toFastEP; in AMD64Instr_XDirect()
723 i->Ain.XIndir.dstGA = dstGA; in AMD64Instr_XIndir()
724 i->Ain.XIndir.amRIP = amRIP; in AMD64Instr_XIndir()
725 i->Ain.XIndir.cond = cond; in AMD64Instr_XIndir()
732 i->Ain.XAssisted.dstGA = dstGA; in AMD64Instr_XAssisted()
733 i->Ain.XAssisted.amRIP = amRIP; in AMD64Instr_XAssisted()
734 i->Ain.XAssisted.cond = cond; in AMD64Instr_XAssisted()
735 i->Ain.XAssisted.jk = jk; in AMD64Instr_XAssisted()
742 i->Ain.CMov64.cond = cond; in AMD64Instr_CMov64()
743 i->Ain.CMov64.src = src; in AMD64Instr_CMov64()
744 i->Ain.CMov64.dst = dst; in AMD64Instr_CMov64()
751 i->Ain.MovxLQ.syned = syned; in AMD64Instr_MovxLQ()
752 i->Ain.MovxLQ.src = src; in AMD64Instr_MovxLQ()
753 i->Ain.MovxLQ.dst = dst; in AMD64Instr_MovxLQ()
760 i->Ain.LoadEX.szSmall = szSmall; in AMD64Instr_LoadEX()
761 i->Ain.LoadEX.syned = syned; in AMD64Instr_LoadEX()
762 i->Ain.LoadEX.src = src; in AMD64Instr_LoadEX()
763 i->Ain.LoadEX.dst = dst; in AMD64Instr_LoadEX()
770 i->Ain.Store.sz = sz; in AMD64Instr_Store()
771 i->Ain.Store.src = src; in AMD64Instr_Store()
772 i->Ain.Store.dst = dst; in AMD64Instr_Store()
779 i->Ain.Set64.cond = cond; in AMD64Instr_Set64()
780 i->Ain.Set64.dst = dst; in AMD64Instr_Set64()
786 i->Ain.Bsfr64.isFwds = isFwds; in AMD64Instr_Bsfr64()
787 i->Ain.Bsfr64.src = src; in AMD64Instr_Bsfr64()
788 i->Ain.Bsfr64.dst = dst; in AMD64Instr_Bsfr64()
799 i->Ain.ACAS.addr = addr; in AMD64Instr_ACAS()
800 i->Ain.ACAS.sz = sz; in AMD64Instr_ACAS()
807 i->Ain.DACAS.addr = addr; in AMD64Instr_DACAS()
808 i->Ain.DACAS.sz = sz; in AMD64Instr_DACAS()
817 i->Ain.A87Free.nregs = nregs; in AMD64Instr_A87Free()
825 i->Ain.A87PushPop.addr = addr; in AMD64Instr_A87PushPop()
826 i->Ain.A87PushPop.isPush = isPush; in AMD64Instr_A87PushPop()
827 i->Ain.A87PushPop.szB = szB; in AMD64Instr_A87PushPop()
835 i->Ain.A87FpOp.op = op; in AMD64Instr_A87FpOp()
842 i->Ain.A87LdCW.addr = addr; in AMD64Instr_A87LdCW()
849 i->Ain.A87StSW.addr = addr; in AMD64Instr_A87StSW()
855 i->Ain.LdMXCSR.addr = addr; in AMD64Instr_LdMXCSR()
861 i->Ain.SseUComIS.sz = toUChar(sz); in AMD64Instr_SseUComIS()
862 i->Ain.SseUComIS.srcL = srcL; in AMD64Instr_SseUComIS()
863 i->Ain.SseUComIS.srcR = srcR; in AMD64Instr_SseUComIS()
864 i->Ain.SseUComIS.dst = dst; in AMD64Instr_SseUComIS()
871 i->Ain.SseSI2SF.szS = toUChar(szS); in AMD64Instr_SseSI2SF()
872 i->Ain.SseSI2SF.szD = toUChar(szD); in AMD64Instr_SseSI2SF()
873 i->Ain.SseSI2SF.src = src; in AMD64Instr_SseSI2SF()
874 i->Ain.SseSI2SF.dst = dst; in AMD64Instr_SseSI2SF()
882 i->Ain.SseSF2SI.szS = toUChar(szS); in AMD64Instr_SseSF2SI()
883 i->Ain.SseSF2SI.szD = toUChar(szD); in AMD64Instr_SseSF2SI()
884 i->Ain.SseSF2SI.src = src; in AMD64Instr_SseSF2SI()
885 i->Ain.SseSF2SI.dst = dst; in AMD64Instr_SseSF2SI()
894 i->Ain.SseSDSS.from64 = from64; in AMD64Instr_SseSDSS()
895 i->Ain.SseSDSS.src = src; in AMD64Instr_SseSDSS()
896 i->Ain.SseSDSS.dst = dst; in AMD64Instr_SseSDSS()
903 i->Ain.SseLdSt.isLoad = isLoad; in AMD64Instr_SseLdSt()
904 i->Ain.SseLdSt.sz = toUChar(sz); in AMD64Instr_SseLdSt()
905 i->Ain.SseLdSt.reg = reg; in AMD64Instr_SseLdSt()
906 i->Ain.SseLdSt.addr = addr; in AMD64Instr_SseLdSt()
914 i->Ain.SseLdzLO.sz = sz; in AMD64Instr_SseLdzLO()
915 i->Ain.SseLdzLO.reg = reg; in AMD64Instr_SseLdzLO()
916 i->Ain.SseLdzLO.addr = addr; in AMD64Instr_SseLdzLO()
923 i->Ain.Sse32Fx4.op = op; in AMD64Instr_Sse32Fx4()
924 i->Ain.Sse32Fx4.src = src; in AMD64Instr_Sse32Fx4()
925 i->Ain.Sse32Fx4.dst = dst; in AMD64Instr_Sse32Fx4()
932 i->Ain.Sse32FLo.op = op; in AMD64Instr_Sse32FLo()
933 i->Ain.Sse32FLo.src = src; in AMD64Instr_Sse32FLo()
934 i->Ain.Sse32FLo.dst = dst; in AMD64Instr_Sse32FLo()
941 i->Ain.Sse64Fx2.op = op; in AMD64Instr_Sse64Fx2()
942 i->Ain.Sse64Fx2.src = src; in AMD64Instr_Sse64Fx2()
943 i->Ain.Sse64Fx2.dst = dst; in AMD64Instr_Sse64Fx2()
950 i->Ain.Sse64FLo.op = op; in AMD64Instr_Sse64FLo()
951 i->Ain.Sse64FLo.src = src; in AMD64Instr_Sse64FLo()
952 i->Ain.Sse64FLo.dst = dst; in AMD64Instr_Sse64FLo()
959 i->Ain.SseReRg.op = op; in AMD64Instr_SseReRg()
960 i->Ain.SseReRg.src = re; in AMD64Instr_SseReRg()
961 i->Ain.SseReRg.dst = rg; in AMD64Instr_SseReRg()
967 i->Ain.SseCMov.cond = cond; in AMD64Instr_SseCMov()
968 i->Ain.SseCMov.src = src; in AMD64Instr_SseCMov()
969 i->Ain.SseCMov.dst = dst; in AMD64Instr_SseCMov()
976 i->Ain.SseShuf.order = order; in AMD64Instr_SseShuf()
977 i->Ain.SseShuf.src = src; in AMD64Instr_SseShuf()
978 i->Ain.SseShuf.dst = dst; in AMD64Instr_SseShuf()
1003 i->Ain.EvCheck.amCounter = amCounter; in AMD64Instr_EvCheck()
1004 i->Ain.EvCheck.amFailAddr = amFailAddr; in AMD64Instr_EvCheck()
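
Every constructor above follows the same pattern: allocate an AMD64Instr, set its tag, copy the arguments into the matching Ain member, and return it. A minimal sketch for AMD64Instr_Imm64, assuming the usual VEX allocator name LibVEX_Alloc and the tag constant Ain_Imm64 (neither appears in the matches above):

   AMD64Instr* AMD64Instr_Imm64 ( ULong imm64, HReg dst ) {
      AMD64Instr* i      = LibVEX_Alloc(sizeof(AMD64Instr)); /* allocator name assumed */
      i->tag             = Ain_Imm64;                        /* tag constant assumed */
      i->Ain.Imm64.imm64 = imm64;   /* as in line 611 above */
      i->Ain.Imm64.dst   = dst;     /* as in line 612 above */
      return i;
   }
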
1018 vex_printf("movabsq $0x%llx,", i->Ain.Imm64.imm64); in ppAMD64Instr()
1019 ppHRegAMD64(i->Ain.Imm64.dst); in ppAMD64Instr()
1022 vex_printf("%sq ", showAMD64AluOp(i->Ain.Alu64R.op)); in ppAMD64Instr()
1023 ppAMD64RMI(i->Ain.Alu64R.src); in ppAMD64Instr()
1025 ppHRegAMD64(i->Ain.Alu64R.dst); in ppAMD64Instr()
1028 vex_printf("%sq ", showAMD64AluOp(i->Ain.Alu64M.op)); in ppAMD64Instr()
1029 ppAMD64RI(i->Ain.Alu64M.src); in ppAMD64Instr()
1031 ppAMD64AMode(i->Ain.Alu64M.dst); in ppAMD64Instr()
1034 vex_printf("%sq ", showAMD64ShiftOp(i->Ain.Sh64.op)); in ppAMD64Instr()
1035 if (i->Ain.Sh64.src == 0) in ppAMD64Instr()
1038 vex_printf("$%d,", (Int)i->Ain.Sh64.src); in ppAMD64Instr()
1039 ppHRegAMD64(i->Ain.Sh64.dst); in ppAMD64Instr()
1042 vex_printf("testq $%d,", (Int)i->Ain.Test64.imm32); in ppAMD64Instr()
1043 ppHRegAMD64(i->Ain.Test64.dst); in ppAMD64Instr()
1046 vex_printf("%sq ", showAMD64UnaryOp(i->Ain.Unary64.op)); in ppAMD64Instr()
1047 ppHRegAMD64(i->Ain.Unary64.dst); in ppAMD64Instr()
1051 ppAMD64AMode(i->Ain.Lea64.am); in ppAMD64Instr()
1053 ppHRegAMD64(i->Ain.Lea64.dst); in ppAMD64Instr()
1056 vex_printf("%sl ", showAMD64AluOp(i->Ain.Alu32R.op)); in ppAMD64Instr()
1057 ppAMD64RMI_lo32(i->Ain.Alu32R.src); in ppAMD64Instr()
1059 ppHRegAMD64_lo32(i->Ain.Alu32R.dst); in ppAMD64Instr()
1062 vex_printf("%cmulq ", i->Ain.MulL.syned ? 's' : 'u'); in ppAMD64Instr()
1063 ppAMD64RM(i->Ain.MulL.src); in ppAMD64Instr()
1067 i->Ain.Div.syned ? 's' : 'u', in ppAMD64Instr()
1068 showAMD64ScalarSz(i->Ain.Div.sz)); in ppAMD64Instr()
1069 ppAMD64RM(i->Ain.Div.src); in ppAMD64Instr()
1073 ppAMD64RMI(i->Ain.Push.src); in ppAMD64Instr()
1077 i->Ain.Call.cond==Acc_ALWAYS in ppAMD64Instr()
1078 ? "" : showAMD64CondCode(i->Ain.Call.cond), in ppAMD64Instr()
1079 i->Ain.Call.regparms ); in ppAMD64Instr()
1080 ppRetLoc(i->Ain.Call.rloc); in ppAMD64Instr()
1081 vex_printf("] 0x%llx", i->Ain.Call.target); in ppAMD64Instr()
1087 showAMD64CondCode(i->Ain.XDirect.cond)); in ppAMD64Instr()
1088 vex_printf("movabsq $0x%llx,%%r11; ", i->Ain.XDirect.dstGA); in ppAMD64Instr()
1090 ppAMD64AMode(i->Ain.XDirect.amRIP); in ppAMD64Instr()
1093 i->Ain.XDirect.toFastEP ? "fast" : "slow"); in ppAMD64Instr()
1098 showAMD64CondCode(i->Ain.XIndir.cond)); in ppAMD64Instr()
1100 ppHRegAMD64(i->Ain.XIndir.dstGA); in ppAMD64Instr()
1102 ppAMD64AMode(i->Ain.XIndir.amRIP); in ppAMD64Instr()
1108 showAMD64CondCode(i->Ain.XAssisted.cond)); in ppAMD64Instr()
1110 ppHRegAMD64(i->Ain.XAssisted.dstGA); in ppAMD64Instr()
1112 ppAMD64AMode(i->Ain.XAssisted.amRIP); in ppAMD64Instr()
1114 (Int)i->Ain.XAssisted.jk); in ppAMD64Instr()
1119 vex_printf("cmov%s ", showAMD64CondCode(i->Ain.CMov64.cond)); in ppAMD64Instr()
1120 ppAMD64RM(i->Ain.CMov64.src); in ppAMD64Instr()
1122 ppHRegAMD64(i->Ain.CMov64.dst); in ppAMD64Instr()
1125 vex_printf("mov%clq ", i->Ain.MovxLQ.syned ? 's' : 'z'); in ppAMD64Instr()
1126 ppHRegAMD64_lo32(i->Ain.MovxLQ.src); in ppAMD64Instr()
1128 ppHRegAMD64(i->Ain.MovxLQ.dst); in ppAMD64Instr()
1131 if (i->Ain.LoadEX.szSmall==4 && !i->Ain.LoadEX.syned) { in ppAMD64Instr()
1133 ppAMD64AMode(i->Ain.LoadEX.src); in ppAMD64Instr()
1135 ppHRegAMD64_lo32(i->Ain.LoadEX.dst); in ppAMD64Instr()
1138 i->Ain.LoadEX.syned ? 's' : 'z', in ppAMD64Instr()
1139 i->Ain.LoadEX.szSmall==1 in ppAMD64Instr()
1141 : (i->Ain.LoadEX.szSmall==2 ? 'w' : 'l')); in ppAMD64Instr()
1142 ppAMD64AMode(i->Ain.LoadEX.src); in ppAMD64Instr()
1144 ppHRegAMD64(i->Ain.LoadEX.dst); in ppAMD64Instr()
1148 vex_printf("mov%c ", i->Ain.Store.sz==1 ? 'b' in ppAMD64Instr()
1149 : (i->Ain.Store.sz==2 ? 'w' : 'l')); in ppAMD64Instr()
1150 ppHRegAMD64(i->Ain.Store.src); in ppAMD64Instr()
1152 ppAMD64AMode(i->Ain.Store.dst); in ppAMD64Instr()
1155 vex_printf("setq%s ", showAMD64CondCode(i->Ain.Set64.cond)); in ppAMD64Instr()
1156 ppHRegAMD64(i->Ain.Set64.dst); in ppAMD64Instr()
1159 vex_printf("bs%cq ", i->Ain.Bsfr64.isFwds ? 'f' : 'r'); in ppAMD64Instr()
1160 ppHRegAMD64(i->Ain.Bsfr64.src); in ppAMD64Instr()
1162 ppHRegAMD64(i->Ain.Bsfr64.dst); in ppAMD64Instr()
1169 i->Ain.ACAS.sz==1 ? 'b' : i->Ain.ACAS.sz==2 ? 'w' in ppAMD64Instr()
1170 : i->Ain.ACAS.sz==4 ? 'l' : 'q' ); in ppAMD64Instr()
1172 ppAMD64AMode(i->Ain.ACAS.addr); in ppAMD64Instr()
1176 (Int)(2 * i->Ain.DACAS.sz)); in ppAMD64Instr()
1177 ppAMD64AMode(i->Ain.DACAS.addr); in ppAMD64Instr()
1180 vex_printf("ffree %%st(7..%d)", 8 - i->Ain.A87Free.nregs ); in ppAMD64Instr()
1183 vex_printf(i->Ain.A87PushPop.isPush ? "fld%c " : "fstp%c ", in ppAMD64Instr()
1184 i->Ain.A87PushPop.szB == 4 ? 's' : 'l'); in ppAMD64Instr()
1185 ppAMD64AMode(i->Ain.A87PushPop.addr); in ppAMD64Instr()
1188 vex_printf("f%s", showA87FpOp(i->Ain.A87FpOp.op)); in ppAMD64Instr()
1192 ppAMD64AMode(i->Ain.A87LdCW.addr); in ppAMD64Instr()
1196 ppAMD64AMode(i->Ain.A87StSW.addr); in ppAMD64Instr()
1200 ppAMD64AMode(i->Ain.LdMXCSR.addr); in ppAMD64Instr()
1203 vex_printf("ucomis%s ", i->Ain.SseUComIS.sz==4 ? "s" : "d"); in ppAMD64Instr()
1204 ppHRegAMD64(i->Ain.SseUComIS.srcL); in ppAMD64Instr()
1206 ppHRegAMD64(i->Ain.SseUComIS.srcR); in ppAMD64Instr()
1208 ppHRegAMD64(i->Ain.SseUComIS.dst); in ppAMD64Instr()
1211 vex_printf("cvtsi2s%s ", i->Ain.SseSI2SF.szD==4 ? "s" : "d"); in ppAMD64Instr()
1212 (i->Ain.SseSI2SF.szS==4 ? ppHRegAMD64_lo32 : ppHRegAMD64) in ppAMD64Instr()
1213 (i->Ain.SseSI2SF.src); in ppAMD64Instr()
1215 ppHRegAMD64(i->Ain.SseSI2SF.dst); in ppAMD64Instr()
1218 vex_printf("cvts%s2si ", i->Ain.SseSF2SI.szS==4 ? "s" : "d"); in ppAMD64Instr()
1219 ppHRegAMD64(i->Ain.SseSF2SI.src); in ppAMD64Instr()
1221 (i->Ain.SseSF2SI.szD==4 ? ppHRegAMD64_lo32 : ppHRegAMD64) in ppAMD64Instr()
1222 (i->Ain.SseSF2SI.dst); in ppAMD64Instr()
1225 vex_printf(i->Ain.SseSDSS.from64 ? "cvtsd2ss " : "cvtss2sd "); in ppAMD64Instr()
1226 ppHRegAMD64(i->Ain.SseSDSS.src); in ppAMD64Instr()
1228 ppHRegAMD64(i->Ain.SseSDSS.dst); in ppAMD64Instr()
1231 switch (i->Ain.SseLdSt.sz) { in ppAMD64Instr()
1237 if (i->Ain.SseLdSt.isLoad) { in ppAMD64Instr()
1238 ppAMD64AMode(i->Ain.SseLdSt.addr); in ppAMD64Instr()
1240 ppHRegAMD64(i->Ain.SseLdSt.reg); in ppAMD64Instr()
1242 ppHRegAMD64(i->Ain.SseLdSt.reg); in ppAMD64Instr()
1244 ppAMD64AMode(i->Ain.SseLdSt.addr); in ppAMD64Instr()
1248 vex_printf("movs%s ", i->Ain.SseLdzLO.sz==4 ? "s" : "d"); in ppAMD64Instr()
1249 ppAMD64AMode(i->Ain.SseLdzLO.addr); in ppAMD64Instr()
1251 ppHRegAMD64(i->Ain.SseLdzLO.reg); in ppAMD64Instr()
1254 vex_printf("%sps ", showAMD64SseOp(i->Ain.Sse32Fx4.op)); in ppAMD64Instr()
1255 ppHRegAMD64(i->Ain.Sse32Fx4.src); in ppAMD64Instr()
1257 ppHRegAMD64(i->Ain.Sse32Fx4.dst); in ppAMD64Instr()
1260 vex_printf("%sss ", showAMD64SseOp(i->Ain.Sse32FLo.op)); in ppAMD64Instr()
1261 ppHRegAMD64(i->Ain.Sse32FLo.src); in ppAMD64Instr()
1263 ppHRegAMD64(i->Ain.Sse32FLo.dst); in ppAMD64Instr()
1266 vex_printf("%spd ", showAMD64SseOp(i->Ain.Sse64Fx2.op)); in ppAMD64Instr()
1267 ppHRegAMD64(i->Ain.Sse64Fx2.src); in ppAMD64Instr()
1269 ppHRegAMD64(i->Ain.Sse64Fx2.dst); in ppAMD64Instr()
1272 vex_printf("%ssd ", showAMD64SseOp(i->Ain.Sse64FLo.op)); in ppAMD64Instr()
1273 ppHRegAMD64(i->Ain.Sse64FLo.src); in ppAMD64Instr()
1275 ppHRegAMD64(i->Ain.Sse64FLo.dst); in ppAMD64Instr()
1278 vex_printf("%s ", showAMD64SseOp(i->Ain.SseReRg.op)); in ppAMD64Instr()
1279 ppHRegAMD64(i->Ain.SseReRg.src); in ppAMD64Instr()
1281 ppHRegAMD64(i->Ain.SseReRg.dst); in ppAMD64Instr()
1284 vex_printf("cmov%s ", showAMD64CondCode(i->Ain.SseCMov.cond)); in ppAMD64Instr()
1285 ppHRegAMD64(i->Ain.SseCMov.src); in ppAMD64Instr()
1287 ppHRegAMD64(i->Ain.SseCMov.dst); in ppAMD64Instr()
1290 vex_printf("pshufd $0x%x,", i->Ain.SseShuf.order); in ppAMD64Instr()
1291 ppHRegAMD64(i->Ain.SseShuf.src); in ppAMD64Instr()
1293 ppHRegAMD64(i->Ain.SseShuf.dst); in ppAMD64Instr()
1315 ppAMD64AMode(i->Ain.EvCheck.amCounter); in ppAMD64Instr()
1317 ppAMD64AMode(i->Ain.EvCheck.amFailAddr); in ppAMD64Instr()
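
ppAMD64Instr is a single switch over i->tag; each case reads the corresponding Ain member and prints it with vex_printf and the ppHRegAMD64 / ppAMD64AMode family of helpers. A sketch of the Imm64 case, where only the two printed fields are confirmed by lines 1018-1019 above and the case label and return are assumed:

   case Ain_Imm64:
      vex_printf("movabsq $0x%llx,", i->Ain.Imm64.imm64);
      ppHRegAMD64(i->Ain.Imm64.dst);
      return;
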
1337 addHRegUse(u, HRmWrite, i->Ain.Imm64.dst); in getRegUsage_AMD64Instr()
1340 addRegUsage_AMD64RMI(u, i->Ain.Alu64R.src); in getRegUsage_AMD64Instr()
1341 if (i->Ain.Alu64R.op == Aalu_MOV) { in getRegUsage_AMD64Instr()
1342 addHRegUse(u, HRmWrite, i->Ain.Alu64R.dst); in getRegUsage_AMD64Instr()
1345 if (i->Ain.Alu64R.op == Aalu_CMP) { in getRegUsage_AMD64Instr()
1346 addHRegUse(u, HRmRead, i->Ain.Alu64R.dst); in getRegUsage_AMD64Instr()
1349 addHRegUse(u, HRmModify, i->Ain.Alu64R.dst); in getRegUsage_AMD64Instr()
1352 addRegUsage_AMD64RI(u, i->Ain.Alu64M.src); in getRegUsage_AMD64Instr()
1353 addRegUsage_AMD64AMode(u, i->Ain.Alu64M.dst); in getRegUsage_AMD64Instr()
1356 addHRegUse(u, HRmModify, i->Ain.Sh64.dst); in getRegUsage_AMD64Instr()
1357 if (i->Ain.Sh64.src == 0) in getRegUsage_AMD64Instr()
1361 addHRegUse(u, HRmRead, i->Ain.Test64.dst); in getRegUsage_AMD64Instr()
1364 addHRegUse(u, HRmModify, i->Ain.Unary64.dst); in getRegUsage_AMD64Instr()
1367 addRegUsage_AMD64AMode(u, i->Ain.Lea64.am); in getRegUsage_AMD64Instr()
1368 addHRegUse(u, HRmWrite, i->Ain.Lea64.dst); in getRegUsage_AMD64Instr()
1371 vassert(i->Ain.Alu32R.op != Aalu_MOV); in getRegUsage_AMD64Instr()
1372 addRegUsage_AMD64RMI(u, i->Ain.Alu32R.src); in getRegUsage_AMD64Instr()
1373 if (i->Ain.Alu32R.op == Aalu_CMP) { in getRegUsage_AMD64Instr()
1374 addHRegUse(u, HRmRead, i->Ain.Alu32R.dst); in getRegUsage_AMD64Instr()
1377 addHRegUse(u, HRmModify, i->Ain.Alu32R.dst); in getRegUsage_AMD64Instr()
1380 addRegUsage_AMD64RM(u, i->Ain.MulL.src, HRmRead); in getRegUsage_AMD64Instr()
1385 addRegUsage_AMD64RM(u, i->Ain.Div.src, HRmRead); in getRegUsage_AMD64Instr()
1390 addRegUsage_AMD64RMI(u, i->Ain.Push.src); in getRegUsage_AMD64Instr()
1424 switch (i->Ain.Call.regparms) { in getRegUsage_AMD64Instr()
1450 addRegUsage_AMD64AMode(u, i->Ain.XDirect.amRIP); in getRegUsage_AMD64Instr()
1454 addHRegUse(u, HRmRead, i->Ain.XIndir.dstGA); in getRegUsage_AMD64Instr()
1455 addRegUsage_AMD64AMode(u, i->Ain.XIndir.amRIP); in getRegUsage_AMD64Instr()
1459 addHRegUse(u, HRmRead, i->Ain.XAssisted.dstGA); in getRegUsage_AMD64Instr()
1460 addRegUsage_AMD64AMode(u, i->Ain.XAssisted.amRIP); in getRegUsage_AMD64Instr()
1463 addRegUsage_AMD64RM(u, i->Ain.CMov64.src, HRmRead); in getRegUsage_AMD64Instr()
1464 addHRegUse(u, HRmModify, i->Ain.CMov64.dst); in getRegUsage_AMD64Instr()
1467 addHRegUse(u, HRmRead, i->Ain.MovxLQ.src); in getRegUsage_AMD64Instr()
1468 addHRegUse(u, HRmWrite, i->Ain.MovxLQ.dst); in getRegUsage_AMD64Instr()
1471 addRegUsage_AMD64AMode(u, i->Ain.LoadEX.src); in getRegUsage_AMD64Instr()
1472 addHRegUse(u, HRmWrite, i->Ain.LoadEX.dst); in getRegUsage_AMD64Instr()
1475 addHRegUse(u, HRmRead, i->Ain.Store.src); in getRegUsage_AMD64Instr()
1476 addRegUsage_AMD64AMode(u, i->Ain.Store.dst); in getRegUsage_AMD64Instr()
1479 addHRegUse(u, HRmWrite, i->Ain.Set64.dst); in getRegUsage_AMD64Instr()
1482 addHRegUse(u, HRmRead, i->Ain.Bsfr64.src); in getRegUsage_AMD64Instr()
1483 addHRegUse(u, HRmWrite, i->Ain.Bsfr64.dst); in getRegUsage_AMD64Instr()
1488 addRegUsage_AMD64AMode(u, i->Ain.ACAS.addr); in getRegUsage_AMD64Instr()
1493 addRegUsage_AMD64AMode(u, i->Ain.DACAS.addr); in getRegUsage_AMD64Instr()
1502 addRegUsage_AMD64AMode(u, i->Ain.A87PushPop.addr); in getRegUsage_AMD64Instr()
1507 addRegUsage_AMD64AMode(u, i->Ain.A87LdCW.addr); in getRegUsage_AMD64Instr()
1510 addRegUsage_AMD64AMode(u, i->Ain.A87StSW.addr); in getRegUsage_AMD64Instr()
1513 addRegUsage_AMD64AMode(u, i->Ain.LdMXCSR.addr); in getRegUsage_AMD64Instr()
1516 addHRegUse(u, HRmRead, i->Ain.SseUComIS.srcL); in getRegUsage_AMD64Instr()
1517 addHRegUse(u, HRmRead, i->Ain.SseUComIS.srcR); in getRegUsage_AMD64Instr()
1518 addHRegUse(u, HRmWrite, i->Ain.SseUComIS.dst); in getRegUsage_AMD64Instr()
1521 addHRegUse(u, HRmRead, i->Ain.SseSI2SF.src); in getRegUsage_AMD64Instr()
1522 addHRegUse(u, HRmWrite, i->Ain.SseSI2SF.dst); in getRegUsage_AMD64Instr()
1525 addHRegUse(u, HRmRead, i->Ain.SseSF2SI.src); in getRegUsage_AMD64Instr()
1526 addHRegUse(u, HRmWrite, i->Ain.SseSF2SI.dst); in getRegUsage_AMD64Instr()
1529 addHRegUse(u, HRmRead, i->Ain.SseSDSS.src); in getRegUsage_AMD64Instr()
1530 addHRegUse(u, HRmWrite, i->Ain.SseSDSS.dst); in getRegUsage_AMD64Instr()
1533 addRegUsage_AMD64AMode(u, i->Ain.SseLdSt.addr); in getRegUsage_AMD64Instr()
1534 addHRegUse(u, i->Ain.SseLdSt.isLoad ? HRmWrite : HRmRead, in getRegUsage_AMD64Instr()
1535 i->Ain.SseLdSt.reg); in getRegUsage_AMD64Instr()
1538 addRegUsage_AMD64AMode(u, i->Ain.SseLdzLO.addr); in getRegUsage_AMD64Instr()
1539 addHRegUse(u, HRmWrite, i->Ain.SseLdzLO.reg); in getRegUsage_AMD64Instr()
1542 vassert(i->Ain.Sse32Fx4.op != Asse_MOV); in getRegUsage_AMD64Instr()
1543 unary = toBool( i->Ain.Sse32Fx4.op == Asse_RCPF in getRegUsage_AMD64Instr()
1544 || i->Ain.Sse32Fx4.op == Asse_RSQRTF in getRegUsage_AMD64Instr()
1545 || i->Ain.Sse32Fx4.op == Asse_SQRTF ); in getRegUsage_AMD64Instr()
1546 addHRegUse(u, HRmRead, i->Ain.Sse32Fx4.src); in getRegUsage_AMD64Instr()
1548 i->Ain.Sse32Fx4.dst); in getRegUsage_AMD64Instr()
1551 vassert(i->Ain.Sse32FLo.op != Asse_MOV); in getRegUsage_AMD64Instr()
1552 unary = toBool( i->Ain.Sse32FLo.op == Asse_RCPF in getRegUsage_AMD64Instr()
1553 || i->Ain.Sse32FLo.op == Asse_RSQRTF in getRegUsage_AMD64Instr()
1554 || i->Ain.Sse32FLo.op == Asse_SQRTF ); in getRegUsage_AMD64Instr()
1555 addHRegUse(u, HRmRead, i->Ain.Sse32FLo.src); in getRegUsage_AMD64Instr()
1557 i->Ain.Sse32FLo.dst); in getRegUsage_AMD64Instr()
1560 vassert(i->Ain.Sse64Fx2.op != Asse_MOV); in getRegUsage_AMD64Instr()
1561 unary = toBool( i->Ain.Sse64Fx2.op == Asse_RCPF in getRegUsage_AMD64Instr()
1562 || i->Ain.Sse64Fx2.op == Asse_RSQRTF in getRegUsage_AMD64Instr()
1563 || i->Ain.Sse64Fx2.op == Asse_SQRTF ); in getRegUsage_AMD64Instr()
1564 addHRegUse(u, HRmRead, i->Ain.Sse64Fx2.src); in getRegUsage_AMD64Instr()
1566 i->Ain.Sse64Fx2.dst); in getRegUsage_AMD64Instr()
1569 vassert(i->Ain.Sse64FLo.op != Asse_MOV); in getRegUsage_AMD64Instr()
1570 unary = toBool( i->Ain.Sse64FLo.op == Asse_RCPF in getRegUsage_AMD64Instr()
1571 || i->Ain.Sse64FLo.op == Asse_RSQRTF in getRegUsage_AMD64Instr()
1572 || i->Ain.Sse64FLo.op == Asse_SQRTF ); in getRegUsage_AMD64Instr()
1573 addHRegUse(u, HRmRead, i->Ain.Sse64FLo.src); in getRegUsage_AMD64Instr()
1575 i->Ain.Sse64FLo.dst); in getRegUsage_AMD64Instr()
1578 if ( (i->Ain.SseReRg.op == Asse_XOR in getRegUsage_AMD64Instr()
1579 || i->Ain.SseReRg.op == Asse_CMPEQ32) in getRegUsage_AMD64Instr()
1580 && sameHReg(i->Ain.SseReRg.src, i->Ain.SseReRg.dst)) { in getRegUsage_AMD64Instr()
1585 addHRegUse(u, HRmWrite, i->Ain.SseReRg.dst); in getRegUsage_AMD64Instr()
1587 addHRegUse(u, HRmRead, i->Ain.SseReRg.src); in getRegUsage_AMD64Instr()
1588 addHRegUse(u, i->Ain.SseReRg.op == Asse_MOV in getRegUsage_AMD64Instr()
1590 i->Ain.SseReRg.dst); in getRegUsage_AMD64Instr()
1594 addHRegUse(u, HRmRead, i->Ain.SseCMov.src); in getRegUsage_AMD64Instr()
1595 addHRegUse(u, HRmModify, i->Ain.SseCMov.dst); in getRegUsage_AMD64Instr()
1598 addHRegUse(u, HRmRead, i->Ain.SseShuf.src); in getRegUsage_AMD64Instr()
1599 addHRegUse(u, HRmWrite, i->Ain.SseShuf.dst); in getRegUsage_AMD64Instr()
1622 addRegUsage_AMD64AMode(u, i->Ain.EvCheck.amCounter); in getRegUsage_AMD64Instr()
1623 addRegUsage_AMD64AMode(u, i->Ain.EvCheck.amFailAddr); in getRegUsage_AMD64Instr()
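
getRegUsage_AMD64Instr tells the register allocator how each instruction touches its registers: HRmRead, HRmWrite or HRmModify. The Alu64R case (lines 1340-1349 above) is typical: a MOV only writes its destination, a CMP only reads it, and every other ALU op modifies it. A sketch of that case, with the case label and the early returns assumed:

   case Ain_Alu64R:
      addRegUsage_AMD64RMI(u, i->Ain.Alu64R.src);
      if (i->Ain.Alu64R.op == Aalu_MOV) {
         addHRegUse(u, HRmWrite, i->Ain.Alu64R.dst);   /* plain copy: dst is write-only */
         return;
      }
      if (i->Ain.Alu64R.op == Aalu_CMP) {
         addHRegUse(u, HRmRead, i->Ain.Alu64R.dst);    /* compare: dst is read-only */
         return;
      }
      addHRegUse(u, HRmModify, i->Ain.Alu64R.dst);     /* add/sub/and/...: read-modify-write */
      return;
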
1645 mapReg(m, &i->Ain.Imm64.dst); in mapRegs_AMD64Instr()
1648 mapRegs_AMD64RMI(m, i->Ain.Alu64R.src); in mapRegs_AMD64Instr()
1649 mapReg(m, &i->Ain.Alu64R.dst); in mapRegs_AMD64Instr()
1652 mapRegs_AMD64RI(m, i->Ain.Alu64M.src); in mapRegs_AMD64Instr()
1653 mapRegs_AMD64AMode(m, i->Ain.Alu64M.dst); in mapRegs_AMD64Instr()
1656 mapReg(m, &i->Ain.Sh64.dst); in mapRegs_AMD64Instr()
1659 mapReg(m, &i->Ain.Test64.dst); in mapRegs_AMD64Instr()
1662 mapReg(m, &i->Ain.Unary64.dst); in mapRegs_AMD64Instr()
1665 mapRegs_AMD64AMode(m, i->Ain.Lea64.am); in mapRegs_AMD64Instr()
1666 mapReg(m, &i->Ain.Lea64.dst); in mapRegs_AMD64Instr()
1669 mapRegs_AMD64RMI(m, i->Ain.Alu32R.src); in mapRegs_AMD64Instr()
1670 mapReg(m, &i->Ain.Alu32R.dst); in mapRegs_AMD64Instr()
1673 mapRegs_AMD64RM(m, i->Ain.MulL.src); in mapRegs_AMD64Instr()
1676 mapRegs_AMD64RM(m, i->Ain.Div.src); in mapRegs_AMD64Instr()
1679 mapRegs_AMD64RMI(m, i->Ain.Push.src); in mapRegs_AMD64Instr()
1684 mapRegs_AMD64AMode(m, i->Ain.XDirect.amRIP); in mapRegs_AMD64Instr()
1687 mapReg(m, &i->Ain.XIndir.dstGA); in mapRegs_AMD64Instr()
1688 mapRegs_AMD64AMode(m, i->Ain.XIndir.amRIP); in mapRegs_AMD64Instr()
1691 mapReg(m, &i->Ain.XAssisted.dstGA); in mapRegs_AMD64Instr()
1692 mapRegs_AMD64AMode(m, i->Ain.XAssisted.amRIP); in mapRegs_AMD64Instr()
1695 mapRegs_AMD64RM(m, i->Ain.CMov64.src); in mapRegs_AMD64Instr()
1696 mapReg(m, &i->Ain.CMov64.dst); in mapRegs_AMD64Instr()
1699 mapReg(m, &i->Ain.MovxLQ.src); in mapRegs_AMD64Instr()
1700 mapReg(m, &i->Ain.MovxLQ.dst); in mapRegs_AMD64Instr()
1703 mapRegs_AMD64AMode(m, i->Ain.LoadEX.src); in mapRegs_AMD64Instr()
1704 mapReg(m, &i->Ain.LoadEX.dst); in mapRegs_AMD64Instr()
1707 mapReg(m, &i->Ain.Store.src); in mapRegs_AMD64Instr()
1708 mapRegs_AMD64AMode(m, i->Ain.Store.dst); in mapRegs_AMD64Instr()
1711 mapReg(m, &i->Ain.Set64.dst); in mapRegs_AMD64Instr()
1714 mapReg(m, &i->Ain.Bsfr64.src); in mapRegs_AMD64Instr()
1715 mapReg(m, &i->Ain.Bsfr64.dst); in mapRegs_AMD64Instr()
1720 mapRegs_AMD64AMode(m, i->Ain.ACAS.addr); in mapRegs_AMD64Instr()
1723 mapRegs_AMD64AMode(m, i->Ain.DACAS.addr); in mapRegs_AMD64Instr()
1728 mapRegs_AMD64AMode(m, i->Ain.A87PushPop.addr); in mapRegs_AMD64Instr()
1733 mapRegs_AMD64AMode(m, i->Ain.A87LdCW.addr); in mapRegs_AMD64Instr()
1736 mapRegs_AMD64AMode(m, i->Ain.A87StSW.addr); in mapRegs_AMD64Instr()
1739 mapRegs_AMD64AMode(m, i->Ain.LdMXCSR.addr); in mapRegs_AMD64Instr()
1742 mapReg(m, &i->Ain.SseUComIS.srcL); in mapRegs_AMD64Instr()
1743 mapReg(m, &i->Ain.SseUComIS.srcR); in mapRegs_AMD64Instr()
1744 mapReg(m, &i->Ain.SseUComIS.dst); in mapRegs_AMD64Instr()
1747 mapReg(m, &i->Ain.SseSI2SF.src); in mapRegs_AMD64Instr()
1748 mapReg(m, &i->Ain.SseSI2SF.dst); in mapRegs_AMD64Instr()
1751 mapReg(m, &i->Ain.SseSF2SI.src); in mapRegs_AMD64Instr()
1752 mapReg(m, &i->Ain.SseSF2SI.dst); in mapRegs_AMD64Instr()
1755 mapReg(m, &i->Ain.SseSDSS.src); in mapRegs_AMD64Instr()
1756 mapReg(m, &i->Ain.SseSDSS.dst); in mapRegs_AMD64Instr()
1759 mapReg(m, &i->Ain.SseLdSt.reg); in mapRegs_AMD64Instr()
1760 mapRegs_AMD64AMode(m, i->Ain.SseLdSt.addr); in mapRegs_AMD64Instr()
1763 mapReg(m, &i->Ain.SseLdzLO.reg); in mapRegs_AMD64Instr()
1764 mapRegs_AMD64AMode(m, i->Ain.SseLdzLO.addr); in mapRegs_AMD64Instr()
1767 mapReg(m, &i->Ain.Sse32Fx4.src); in mapRegs_AMD64Instr()
1768 mapReg(m, &i->Ain.Sse32Fx4.dst); in mapRegs_AMD64Instr()
1771 mapReg(m, &i->Ain.Sse32FLo.src); in mapRegs_AMD64Instr()
1772 mapReg(m, &i->Ain.Sse32FLo.dst); in mapRegs_AMD64Instr()
1775 mapReg(m, &i->Ain.Sse64Fx2.src); in mapRegs_AMD64Instr()
1776 mapReg(m, &i->Ain.Sse64Fx2.dst); in mapRegs_AMD64Instr()
1779 mapReg(m, &i->Ain.Sse64FLo.src); in mapRegs_AMD64Instr()
1780 mapReg(m, &i->Ain.Sse64FLo.dst); in mapRegs_AMD64Instr()
1783 mapReg(m, &i->Ain.SseReRg.src); in mapRegs_AMD64Instr()
1784 mapReg(m, &i->Ain.SseReRg.dst); in mapRegs_AMD64Instr()
1787 mapReg(m, &i->Ain.SseCMov.src); in mapRegs_AMD64Instr()
1788 mapReg(m, &i->Ain.SseCMov.dst); in mapRegs_AMD64Instr()
1791 mapReg(m, &i->Ain.SseShuf.src); in mapRegs_AMD64Instr()
1792 mapReg(m, &i->Ain.SseShuf.dst); in mapRegs_AMD64Instr()
1805 mapRegs_AMD64AMode(m, i->Ain.EvCheck.amCounter); in mapRegs_AMD64Instr()
1806 mapRegs_AMD64AMode(m, i->Ain.EvCheck.amFailAddr); in mapRegs_AMD64Instr()
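
mapRegs_AMD64Instr applies the allocator's virtual-to-real register mapping to every register the instruction mentions, using mapReg for bare HRegs and the mapRegs_AMD64RMI / mapRegs_AMD64AMode helpers for composite operands. A sketch of the Alu64R case, with the case label assumed:

   case Ain_Alu64R:
      mapRegs_AMD64RMI(m, i->Ain.Alu64R.src);
      mapReg(m, &i->Ain.Alu64R.dst);
      return;
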
1826 if (i->Ain.Alu64R.op != Aalu_MOV) in isMove_AMD64Instr()
1828 if (i->Ain.Alu64R.src->tag != Armi_Reg) in isMove_AMD64Instr()
1830 *src = i->Ain.Alu64R.src->Armi.Reg.reg; in isMove_AMD64Instr()
1831 *dst = i->Ain.Alu64R.dst; in isMove_AMD64Instr()
1835 if (i->Ain.SseReRg.op != Asse_MOV) in isMove_AMD64Instr()
1837 *src = i->Ain.SseReRg.src; in isMove_AMD64Instr()
1838 *dst = i->Ain.SseReRg.dst; in isMove_AMD64Instr()
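
isMove_AMD64Instr lets the allocator coalesce plain register-to-register copies: a 64-bit ALU MOV whose source operand is a register, or an SSE MOV between two vector registers. A sketch assembled from lines 1826-1838 above, with the signature and the switch skeleton assumed:

   Bool isMove_AMD64Instr ( AMD64Instr* i, HReg* src, HReg* dst ) {
      switch (i->tag) {
         case Ain_Alu64R:                               /* integer reg-to-reg move? */
            if (i->Ain.Alu64R.op != Aalu_MOV)
               return False;
            if (i->Ain.Alu64R.src->tag != Armi_Reg)
               return False;
            *src = i->Ain.Alu64R.src->Armi.Reg.reg;
            *dst = i->Ain.Alu64R.dst;
            return True;
         case Ain_SseReRg:                              /* SSE reg-to-reg move? */
            if (i->Ain.SseReRg.op != Asse_MOV)
               return False;
            *src = i->Ain.SseReRg.src;
            *dst = i->Ain.SseReRg.dst;
            return True;
         default:
            return False;
      }
   }
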
2294 if (i->Ain.Imm64.imm64 <= 0xFFFFFULL) { in emit_AMD64Instr()
2299 if (1 & iregBit3(i->Ain.Imm64.dst)) in emit_AMD64Instr()
2301 *p++ = 0xB8 + iregBits210(i->Ain.Imm64.dst); in emit_AMD64Instr()
2302 p = emit32(p, (UInt)i->Ain.Imm64.imm64); in emit_AMD64Instr()
2304 *p++ = toUChar(0x48 + (1 & iregBit3(i->Ain.Imm64.dst))); in emit_AMD64Instr()
2305 *p++ = toUChar(0xB8 + iregBits210(i->Ain.Imm64.dst)); in emit_AMD64Instr()
2306 p = emit64(p, i->Ain.Imm64.imm64); in emit_AMD64Instr()
2312 if (i->Ain.Alu64R.op == Aalu_MOV) { in emit_AMD64Instr()
2313 switch (i->Ain.Alu64R.src->tag) { in emit_AMD64Instr()
2315 if (0 == (i->Ain.Alu64R.src->Armi.Imm.imm32 & ~0xFFFFF)) { in emit_AMD64Instr()
2325 if (1 & iregBit3(i->Ain.Alu64R.dst)) in emit_AMD64Instr()
2327 *p++ = 0xB8 + iregBits210(i->Ain.Alu64R.dst); in emit_AMD64Instr()
2328 p = emit32(p, i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2330 *p++ = toUChar(0x48 + (1 & iregBit3(i->Ain.Alu64R.dst))); in emit_AMD64Instr()
2332 *p++ = toUChar(0xC0 + iregBits210(i->Ain.Alu64R.dst)); in emit_AMD64Instr()
2333 p = emit32(p, i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2337 *p++ = rexAMode_R( i->Ain.Alu64R.src->Armi.Reg.reg, in emit_AMD64Instr()
2338 i->Ain.Alu64R.dst ); in emit_AMD64Instr()
2340 p = doAMode_R(p, i->Ain.Alu64R.src->Armi.Reg.reg, in emit_AMD64Instr()
2341 i->Ain.Alu64R.dst); in emit_AMD64Instr()
2344 *p++ = rexAMode_M(i->Ain.Alu64R.dst, in emit_AMD64Instr()
2345 i->Ain.Alu64R.src->Armi.Mem.am); in emit_AMD64Instr()
2347 p = doAMode_M(p, i->Ain.Alu64R.dst, in emit_AMD64Instr()
2348 i->Ain.Alu64R.src->Armi.Mem.am); in emit_AMD64Instr()
2355 if (i->Ain.Alu64R.op == Aalu_MUL) { in emit_AMD64Instr()
2356 switch (i->Ain.Alu64R.src->tag) { in emit_AMD64Instr()
2358 *p++ = rexAMode_R( i->Ain.Alu64R.dst, in emit_AMD64Instr()
2359 i->Ain.Alu64R.src->Armi.Reg.reg); in emit_AMD64Instr()
2362 p = doAMode_R(p, i->Ain.Alu64R.dst, in emit_AMD64Instr()
2363 i->Ain.Alu64R.src->Armi.Reg.reg); in emit_AMD64Instr()
2366 *p++ = rexAMode_M(i->Ain.Alu64R.dst, in emit_AMD64Instr()
2367 i->Ain.Alu64R.src->Armi.Mem.am); in emit_AMD64Instr()
2370 p = doAMode_M(p, i->Ain.Alu64R.dst, in emit_AMD64Instr()
2371 i->Ain.Alu64R.src->Armi.Mem.am); in emit_AMD64Instr()
2374 if (fits8bits(i->Ain.Alu64R.src->Armi.Imm.imm32)) { in emit_AMD64Instr()
2375 *p++ = rexAMode_R(i->Ain.Alu64R.dst, i->Ain.Alu64R.dst); in emit_AMD64Instr()
2377 p = doAMode_R(p, i->Ain.Alu64R.dst, i->Ain.Alu64R.dst); in emit_AMD64Instr()
2378 *p++ = toUChar(0xFF & i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2380 *p++ = rexAMode_R(i->Ain.Alu64R.dst, i->Ain.Alu64R.dst); in emit_AMD64Instr()
2382 p = doAMode_R(p, i->Ain.Alu64R.dst, i->Ain.Alu64R.dst); in emit_AMD64Instr()
2383 p = emit32(p, i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2392 switch (i->Ain.Alu64R.op) { in emit_AMD64Instr()
2411 switch (i->Ain.Alu64R.src->tag) { in emit_AMD64Instr()
2413 if (sameHReg(i->Ain.Alu64R.dst, hregAMD64_RAX()) in emit_AMD64Instr()
2414 && !fits8bits(i->Ain.Alu64R.src->Armi.Imm.imm32)) { in emit_AMD64Instr()
2417 p = emit32(p, i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2419 if (fits8bits(i->Ain.Alu64R.src->Armi.Imm.imm32)) { in emit_AMD64Instr()
2420 *p++ = rexAMode_R( fake(0), i->Ain.Alu64R.dst ); in emit_AMD64Instr()
2422 p = doAMode_R(p, fake(subopc_imm), i->Ain.Alu64R.dst); in emit_AMD64Instr()
2423 *p++ = toUChar(0xFF & i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2425 *p++ = rexAMode_R( fake(0), i->Ain.Alu64R.dst); in emit_AMD64Instr()
2427 p = doAMode_R(p, fake(subopc_imm), i->Ain.Alu64R.dst); in emit_AMD64Instr()
2428 p = emit32(p, i->Ain.Alu64R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2432 *p++ = rexAMode_R( i->Ain.Alu64R.src->Armi.Reg.reg, in emit_AMD64Instr()
2433 i->Ain.Alu64R.dst); in emit_AMD64Instr()
2435 p = doAMode_R(p, i->Ain.Alu64R.src->Armi.Reg.reg, in emit_AMD64Instr()
2436 i->Ain.Alu64R.dst); in emit_AMD64Instr()
2439 *p++ = rexAMode_M( i->Ain.Alu64R.dst, in emit_AMD64Instr()
2440 i->Ain.Alu64R.src->Armi.Mem.am); in emit_AMD64Instr()
2442 p = doAMode_M(p, i->Ain.Alu64R.dst, in emit_AMD64Instr()
2443 i->Ain.Alu64R.src->Armi.Mem.am); in emit_AMD64Instr()
2452 if (i->Ain.Alu64M.op == Aalu_MOV) { in emit_AMD64Instr()
2453 switch (i->Ain.Alu64M.src->tag) { in emit_AMD64Instr()
2455 *p++ = rexAMode_M(i->Ain.Alu64M.src->Ari.Reg.reg, in emit_AMD64Instr()
2456 i->Ain.Alu64M.dst); in emit_AMD64Instr()
2458 p = doAMode_M(p, i->Ain.Alu64M.src->Ari.Reg.reg, in emit_AMD64Instr()
2459 i->Ain.Alu64M.dst); in emit_AMD64Instr()
2462 *p++ = rexAMode_M(fake(0), i->Ain.Alu64M.dst); in emit_AMD64Instr()
2464 p = doAMode_M(p, fake(0), i->Ain.Alu64M.dst); in emit_AMD64Instr()
2465 p = emit32(p, i->Ain.Alu64M.src->Ari.Imm.imm32); in emit_AMD64Instr()
2475 switch (i->Ain.Sh64.op) { in emit_AMD64Instr()
2481 if (i->Ain.Sh64.src == 0) { in emit_AMD64Instr()
2482 *p++ = rexAMode_R(fake(0), i->Ain.Sh64.dst); in emit_AMD64Instr()
2484 p = doAMode_R(p, fake(subopc), i->Ain.Sh64.dst); in emit_AMD64Instr()
2487 *p++ = rexAMode_R(fake(0), i->Ain.Sh64.dst); in emit_AMD64Instr()
2489 p = doAMode_R(p, fake(subopc), i->Ain.Sh64.dst); in emit_AMD64Instr()
2490 *p++ = (UChar)(i->Ain.Sh64.src); in emit_AMD64Instr()
2497 *p++ = rexAMode_R(fake(0), i->Ain.Test64.dst); in emit_AMD64Instr()
2499 p = doAMode_R(p, fake(0), i->Ain.Test64.dst); in emit_AMD64Instr()
2500 p = emit32(p, i->Ain.Test64.imm32); in emit_AMD64Instr()
2504 if (i->Ain.Unary64.op == Aun_NOT) { in emit_AMD64Instr()
2505 *p++ = rexAMode_R(fake(0), i->Ain.Unary64.dst); in emit_AMD64Instr()
2507 p = doAMode_R(p, fake(2), i->Ain.Unary64.dst); in emit_AMD64Instr()
2510 if (i->Ain.Unary64.op == Aun_NEG) { in emit_AMD64Instr()
2511 *p++ = rexAMode_R(fake(0), i->Ain.Unary64.dst); in emit_AMD64Instr()
2513 p = doAMode_R(p, fake(3), i->Ain.Unary64.dst); in emit_AMD64Instr()
2519 *p++ = rexAMode_M(i->Ain.Lea64.dst, i->Ain.Lea64.am); in emit_AMD64Instr()
2521 p = doAMode_M(p, i->Ain.Lea64.dst, i->Ain.Lea64.am); in emit_AMD64Instr()
2527 switch (i->Ain.Alu32R.op) { in emit_AMD64Instr()
2542 switch (i->Ain.Alu32R.src->tag) { in emit_AMD64Instr()
2544 if (sameHReg(i->Ain.Alu32R.dst, hregAMD64_RAX()) in emit_AMD64Instr()
2545 && !fits8bits(i->Ain.Alu32R.src->Armi.Imm.imm32)) { in emit_AMD64Instr()
2548 p = emit32(p, i->Ain.Alu32R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2550 if (fits8bits(i->Ain.Alu32R.src->Armi.Imm.imm32)) { in emit_AMD64Instr()
2551 rex = clearWBit( rexAMode_R( fake(0), i->Ain.Alu32R.dst ) ); in emit_AMD64Instr()
2554 p = doAMode_R(p, fake(subopc_imm), i->Ain.Alu32R.dst); in emit_AMD64Instr()
2555 *p++ = toUChar(0xFF & i->Ain.Alu32R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2557 rex = clearWBit( rexAMode_R( fake(0), i->Ain.Alu32R.dst) ); in emit_AMD64Instr()
2560 p = doAMode_R(p, fake(subopc_imm), i->Ain.Alu32R.dst); in emit_AMD64Instr()
2561 p = emit32(p, i->Ain.Alu32R.src->Armi.Imm.imm32); in emit_AMD64Instr()
2566 rexAMode_R( i->Ain.Alu32R.src->Armi.Reg.reg, in emit_AMD64Instr()
2567 i->Ain.Alu32R.dst) ); in emit_AMD64Instr()
2570 p = doAMode_R(p, i->Ain.Alu32R.src->Armi.Reg.reg, in emit_AMD64Instr()
2571 i->Ain.Alu32R.dst); in emit_AMD64Instr()
2575 rexAMode_M( i->Ain.Alu32R.dst, in emit_AMD64Instr()
2576 i->Ain.Alu32R.src->Armi.Mem.am) ); in emit_AMD64Instr()
2579 p = doAMode_M(p, i->Ain.Alu32R.dst, in emit_AMD64Instr()
2580 i->Ain.Alu32R.src->Armi.Mem.am); in emit_AMD64Instr()
2588 subopc = i->Ain.MulL.syned ? 5 : 4; in emit_AMD64Instr()
2589 switch (i->Ain.MulL.src->tag) { in emit_AMD64Instr()
2592 i->Ain.MulL.src->Arm.Mem.am); in emit_AMD64Instr()
2595 i->Ain.MulL.src->Arm.Mem.am); in emit_AMD64Instr()
2599 i->Ain.MulL.src->Arm.Reg.reg); in emit_AMD64Instr()
2602 i->Ain.MulL.src->Arm.Reg.reg); in emit_AMD64Instr()
2610 subopc = i->Ain.Div.syned ? 7 : 6; in emit_AMD64Instr()
2611 if (i->Ain.Div.sz == 4) { in emit_AMD64Instr()
2612 switch (i->Ain.Div.src->tag) { in emit_AMD64Instr()
2618 i->Ain.Div.src->Arm.Mem.am); in emit_AMD64Instr()
2622 rexAMode_R( fake(0), i->Ain.Div.src->Arm.Reg.reg)); in emit_AMD64Instr()
2625 i->Ain.Div.src->Arm.Reg.reg); in emit_AMD64Instr()
2631 if (i->Ain.Div.sz == 8) { in emit_AMD64Instr()
2632 switch (i->Ain.Div.src->tag) { in emit_AMD64Instr()
2635 i->Ain.Div.src->Arm.Mem.am); in emit_AMD64Instr()
2638 i->Ain.Div.src->Arm.Mem.am); in emit_AMD64Instr()
2642 i->Ain.Div.src->Arm.Reg.reg); in emit_AMD64Instr()
2645 i->Ain.Div.src->Arm.Reg.reg); in emit_AMD64Instr()
2654 switch (i->Ain.Push.src->tag) { in emit_AMD64Instr()
2657 rexAMode_M(fake(0), i->Ain.Push.src->Armi.Mem.am)); in emit_AMD64Instr()
2659 p = doAMode_M(p, fake(6), i->Ain.Push.src->Armi.Mem.am); in emit_AMD64Instr()
2663 p = emit32(p, i->Ain.Push.src->Armi.Imm.imm32); in emit_AMD64Instr()
2666 *p++ = toUChar(0x40 + (1 & iregBit3(i->Ain.Push.src->Armi.Reg.reg))); in emit_AMD64Instr()
2667 *p++ = toUChar(0x50 + iregBits210(i->Ain.Push.src->Armi.Reg.reg)); in emit_AMD64Instr()
2674 if (i->Ain.Call.cond != Acc_ALWAYS in emit_AMD64Instr()
2675 && i->Ain.Call.rloc.pri != RLPri_None) { in emit_AMD64Instr()
2689 Bool shortImm = fitsIn32Bits(i->Ain.Call.target); in emit_AMD64Instr()
2690 if (i->Ain.Call.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2691 *p++ = toUChar(0x70 + (0xF & (i->Ain.Call.cond ^ 1))); in emit_AMD64Instr()
2700 p = emit32(p, (UInt)i->Ain.Call.target); in emit_AMD64Instr()
2705 p = emit64(p, i->Ain.Call.target); in emit_AMD64Instr()
2730 if (i->Ain.XDirect.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2732 *p++ = toUChar(0x70 + (0xF & (i->Ain.XDirect.cond ^ 1))); in emit_AMD64Instr()
2738 if (fitsIn32Bits(i->Ain.XDirect.dstGA)) { in emit_AMD64Instr()
2744 p = emit32(p, (UInt)i->Ain.XDirect.dstGA); in emit_AMD64Instr()
2749 p = emit64(p, i->Ain.XDirect.dstGA); in emit_AMD64Instr()
2753 *p++ = rexAMode_M(r11, i->Ain.XDirect.amRIP); in emit_AMD64Instr()
2755 p = doAMode_M(p, r11, i->Ain.XDirect.amRIP); in emit_AMD64Instr()
2766 = i->Ain.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP in emit_AMD64Instr()
2776 if (i->Ain.XDirect.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2798 if (i->Ain.XIndir.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2800 *p++ = toUChar(0x70 + (0xF & (i->Ain.XIndir.cond ^ 1))); in emit_AMD64Instr()
2806 *p++ = rexAMode_M(i->Ain.XIndir.dstGA, i->Ain.XIndir.amRIP); in emit_AMD64Instr()
2808 p = doAMode_M(p, i->Ain.XIndir.dstGA, i->Ain.XIndir.amRIP); in emit_AMD64Instr()
2831 if (i->Ain.XIndir.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2845 if (i->Ain.XAssisted.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2847 *p++ = toUChar(0x70 + (0xF & (i->Ain.XAssisted.cond ^ 1))); in emit_AMD64Instr()
2853 *p++ = rexAMode_M(i->Ain.XAssisted.dstGA, i->Ain.XAssisted.amRIP); in emit_AMD64Instr()
2855 p = doAMode_M(p, i->Ain.XAssisted.dstGA, i->Ain.XAssisted.amRIP); in emit_AMD64Instr()
2860 switch (i->Ain.XAssisted.jk) { in emit_AMD64Instr()
2878 ppIRJumpKind(i->Ain.XAssisted.jk); in emit_AMD64Instr()
2894 if (i->Ain.XAssisted.cond != Acc_ALWAYS) { in emit_AMD64Instr()
2903 vassert(i->Ain.CMov64.cond != Acc_ALWAYS); in emit_AMD64Instr()
2904 if (i->Ain.CMov64.src->tag == Arm_Reg) { in emit_AMD64Instr()
2905 *p++ = rexAMode_R(i->Ain.CMov64.dst, i->Ain.CMov64.src->Arm.Reg.reg); in emit_AMD64Instr()
2907 *p++ = toUChar(0x40 + (0xF & i->Ain.CMov64.cond)); in emit_AMD64Instr()
2908 p = doAMode_R(p, i->Ain.CMov64.dst, i->Ain.CMov64.src->Arm.Reg.reg); in emit_AMD64Instr()
2911 if (i->Ain.CMov64.src->tag == Arm_Mem) { in emit_AMD64Instr()
2912 *p++ = rexAMode_M(i->Ain.CMov64.dst, i->Ain.CMov64.src->Arm.Mem.am); in emit_AMD64Instr()
2914 *p++ = toUChar(0x40 + (0xF & i->Ain.CMov64.cond)); in emit_AMD64Instr()
2915 p = doAMode_M(p, i->Ain.CMov64.dst, i->Ain.CMov64.src->Arm.Mem.am); in emit_AMD64Instr()
2923 if (i->Ain.MovxLQ.syned) { in emit_AMD64Instr()
2925 *p++ = rexAMode_R(i->Ain.MovxLQ.dst, i->Ain.MovxLQ.src); in emit_AMD64Instr()
2927 p = doAMode_R(p, i->Ain.MovxLQ.dst, i->Ain.MovxLQ.src); in emit_AMD64Instr()
2932 rexAMode_R(i->Ain.MovxLQ.src, i->Ain.MovxLQ.dst)); in emit_AMD64Instr()
2934 p = doAMode_R(p, i->Ain.MovxLQ.src, i->Ain.MovxLQ.dst); in emit_AMD64Instr()
2939 if (i->Ain.LoadEX.szSmall == 1 && !i->Ain.LoadEX.syned) { in emit_AMD64Instr()
2941 *p++ = rexAMode_M(i->Ain.LoadEX.dst, i->Ain.LoadEX.src); in emit_AMD64Instr()
2944 p = doAMode_M(p, i->Ain.LoadEX.dst, i->Ain.LoadEX.src); in emit_AMD64Instr()
2947 if (i->Ain.LoadEX.szSmall == 2 && !i->Ain.LoadEX.syned) { in emit_AMD64Instr()
2949 *p++ = rexAMode_M(i->Ain.LoadEX.dst, i->Ain.LoadEX.src); in emit_AMD64Instr()
2952 p = doAMode_M(p, i->Ain.LoadEX.dst, i->Ain.LoadEX.src); in emit_AMD64Instr()
2955 if (i->Ain.LoadEX.szSmall == 4 && !i->Ain.LoadEX.syned) { in emit_AMD64Instr()
2962 rexAMode_M(i->Ain.LoadEX.dst, i->Ain.LoadEX.src)); in emit_AMD64Instr()
2964 p = doAMode_M(p, i->Ain.LoadEX.dst, i->Ain.LoadEX.src); in emit_AMD64Instr()
2975 reg = iregBits3210(i->Ain.Set64.dst); in emit_AMD64Instr()
2988 *p++ = toUChar(0x90 + (0x0F & i->Ain.Set64.cond)); in emit_AMD64Instr()
2993 *p++ = rexAMode_R(i->Ain.Bsfr64.dst, i->Ain.Bsfr64.src); in emit_AMD64Instr()
2995 if (i->Ain.Bsfr64.isFwds) { in emit_AMD64Instr()
3000 p = doAMode_R(p, i->Ain.Bsfr64.dst, i->Ain.Bsfr64.src); in emit_AMD64Instr()
3011 if (i->Ain.ACAS.sz == 2) *p++ = 0x66; in emit_AMD64Instr()
3017 rex = rexAMode_M( hregAMD64_RBX(), i->Ain.ACAS.addr ); in emit_AMD64Instr()
3018 if (i->Ain.ACAS.sz != 8) in emit_AMD64Instr()
3023 if (i->Ain.ACAS.sz == 1) *p++ = 0xB0; else *p++ = 0xB1; in emit_AMD64Instr()
3024 p = doAMode_M(p, hregAMD64_RBX(), i->Ain.ACAS.addr); in emit_AMD64Instr()
3033 rex = rexAMode_M( fake(1), i->Ain.ACAS.addr ); in emit_AMD64Instr()
3034 if (i->Ain.ACAS.sz != 8) in emit_AMD64Instr()
3039 p = doAMode_M(p, fake(1), i->Ain.DACAS.addr); in emit_AMD64Instr()
3043 vassert(i->Ain.A87Free.nregs > 0 && i->Ain.A87Free.nregs <= 7); in emit_AMD64Instr()
3044 for (j = 0; j < i->Ain.A87Free.nregs; j++) { in emit_AMD64Instr()
3050 vassert(i->Ain.A87PushPop.szB == 8 || i->Ain.A87PushPop.szB == 4); in emit_AMD64Instr()
3051 if (i->Ain.A87PushPop.isPush) { in emit_AMD64Instr()
3054 rexAMode_M(fake(0), i->Ain.A87PushPop.addr) ); in emit_AMD64Instr()
3055 *p++ = i->Ain.A87PushPop.szB == 4 ? 0xD9 : 0xDD; in emit_AMD64Instr()
3056 p = doAMode_M(p, fake(0)/*subopcode*/, i->Ain.A87PushPop.addr); in emit_AMD64Instr()
3060 rexAMode_M(fake(3), i->Ain.A87PushPop.addr) ); in emit_AMD64Instr()
3061 *p++ = i->Ain.A87PushPop.szB == 4 ? 0xD9 : 0xDD; in emit_AMD64Instr()
3062 p = doAMode_M(p, fake(3)/*subopcode*/, i->Ain.A87PushPop.addr); in emit_AMD64Instr()
3068 switch (i->Ain.A87FpOp.op) { in emit_AMD64Instr()
3103 rexAMode_M(fake(5), i->Ain.A87LdCW.addr) ); in emit_AMD64Instr()
3105 p = doAMode_M(p, fake(5)/*subopcode*/, i->Ain.A87LdCW.addr); in emit_AMD64Instr()
3110 rexAMode_M(fake(7), i->Ain.A87StSW.addr) ); in emit_AMD64Instr()
3112 p = doAMode_M(p, fake(7)/*subopcode*/, i->Ain.A87StSW.addr); in emit_AMD64Instr()
3116 if (i->Ain.Store.sz == 2) { in emit_AMD64Instr()
3122 *p++ = clearWBit( rexAMode_M( i->Ain.Store.src, i->Ain.Store.dst) ); in emit_AMD64Instr()
3124 p = doAMode_M(p, i->Ain.Store.src, i->Ain.Store.dst); in emit_AMD64Instr()
3127 if (i->Ain.Store.sz == 4) { in emit_AMD64Instr()
3128 *p++ = clearWBit( rexAMode_M( i->Ain.Store.src, i->Ain.Store.dst) ); in emit_AMD64Instr()
3130 p = doAMode_M(p, i->Ain.Store.src, i->Ain.Store.dst); in emit_AMD64Instr()
3133 if (i->Ain.Store.sz == 1) { in emit_AMD64Instr()
3137 *p++ = clearWBit( rexAMode_M( i->Ain.Store.src, i->Ain.Store.dst) ); in emit_AMD64Instr()
3139 p = doAMode_M(p, i->Ain.Store.src, i->Ain.Store.dst); in emit_AMD64Instr()
3145 *p++ = clearWBit(rexAMode_M( fake(0), i->Ain.LdMXCSR.addr)); in emit_AMD64Instr()
3148 p = doAMode_M(p, fake(2)/*subopcode*/, i->Ain.LdMXCSR.addr); in emit_AMD64Instr()
3154 if (i->Ain.SseUComIS.sz == 8) { in emit_AMD64Instr()
3158 vassert(i->Ain.SseUComIS.sz == 4); in emit_AMD64Instr()
3161 rexAMode_R( vreg2ireg(i->Ain.SseUComIS.srcL), in emit_AMD64Instr()
3162 vreg2ireg(i->Ain.SseUComIS.srcR) )); in emit_AMD64Instr()
3165 p = doAMode_R(p, vreg2ireg(i->Ain.SseUComIS.srcL), in emit_AMD64Instr()
3166 vreg2ireg(i->Ain.SseUComIS.srcR) ); in emit_AMD64Instr()
3170 *p++ = toUChar(0x40 + (1 & iregBit3(i->Ain.SseUComIS.dst))); in emit_AMD64Instr()
3171 *p++ = toUChar(0x58 + iregBits210(i->Ain.SseUComIS.dst)); in emit_AMD64Instr()
3176 rex = rexAMode_R( vreg2ireg(i->Ain.SseSI2SF.dst), in emit_AMD64Instr()
3177 i->Ain.SseSI2SF.src ); in emit_AMD64Instr()
3178 *p++ = toUChar(i->Ain.SseSI2SF.szD==4 ? 0xF3 : 0xF2); in emit_AMD64Instr()
3179 *p++ = toUChar(i->Ain.SseSI2SF.szS==4 ? clearWBit(rex) : rex); in emit_AMD64Instr()
3182 p = doAMode_R( p, vreg2ireg(i->Ain.SseSI2SF.dst), in emit_AMD64Instr()
3183 i->Ain.SseSI2SF.src ); in emit_AMD64Instr()
3188 rex = rexAMode_R( i->Ain.SseSF2SI.dst, in emit_AMD64Instr()
3189 vreg2ireg(i->Ain.SseSF2SI.src) ); in emit_AMD64Instr()
3190 *p++ = toUChar(i->Ain.SseSF2SI.szS==4 ? 0xF3 : 0xF2); in emit_AMD64Instr()
3191 *p++ = toUChar(i->Ain.SseSF2SI.szD==4 ? clearWBit(rex) : rex); in emit_AMD64Instr()
3194 p = doAMode_R( p, i->Ain.SseSF2SI.dst, in emit_AMD64Instr()
3195 vreg2ireg(i->Ain.SseSF2SI.src) ); in emit_AMD64Instr()
3200 *p++ = toUChar(i->Ain.SseSDSS.from64 ? 0xF2 : 0xF3); in emit_AMD64Instr()
3202 rexAMode_R( vreg2ireg(i->Ain.SseSDSS.dst), in emit_AMD64Instr()
3203 vreg2ireg(i->Ain.SseSDSS.src) )); in emit_AMD64Instr()
3206 p = doAMode_R( p, vreg2ireg(i->Ain.SseSDSS.dst), in emit_AMD64Instr()
3207 vreg2ireg(i->Ain.SseSDSS.src) ); in emit_AMD64Instr()
3211 if (i->Ain.SseLdSt.sz == 8) { in emit_AMD64Instr()
3214 if (i->Ain.SseLdSt.sz == 4) { in emit_AMD64Instr()
3217 if (i->Ain.SseLdSt.sz != 16) { in emit_AMD64Instr()
3221 rexAMode_M( vreg2ireg(i->Ain.SseLdSt.reg), i->Ain.SseLdSt.addr)); in emit_AMD64Instr()
3223 *p++ = toUChar(i->Ain.SseLdSt.isLoad ? 0x10 : 0x11); in emit_AMD64Instr()
3224 p = doAMode_M(p, vreg2ireg(i->Ain.SseLdSt.reg), i->Ain.SseLdSt.addr); in emit_AMD64Instr()
3228 vassert(i->Ain.SseLdzLO.sz == 4 || i->Ain.SseLdzLO.sz == 8); in emit_AMD64Instr()
3230 *p++ = toUChar(i->Ain.SseLdzLO.sz==4 ? 0xF3 : 0xF2); in emit_AMD64Instr()
3232 rexAMode_M(vreg2ireg(i->Ain.SseLdzLO.reg), in emit_AMD64Instr()
3233 i->Ain.SseLdzLO.addr)); in emit_AMD64Instr()
3236 p = doAMode_M(p, vreg2ireg(i->Ain.SseLdzLO.reg), in emit_AMD64Instr()
3237 i->Ain.SseLdzLO.addr); in emit_AMD64Instr()
3243 rexAMode_R( vreg2ireg(i->Ain.Sse32Fx4.dst), in emit_AMD64Instr()
3244 vreg2ireg(i->Ain.Sse32Fx4.src) )); in emit_AMD64Instr()
3246 switch (i->Ain.Sse32Fx4.op) { in emit_AMD64Instr()
3262 p = doAMode_R(p, vreg2ireg(i->Ain.Sse32Fx4.dst), in emit_AMD64Instr()
3263 vreg2ireg(i->Ain.Sse32Fx4.src) ); in emit_AMD64Instr()
3272 rexAMode_R( vreg2ireg(i->Ain.Sse64Fx2.dst), in emit_AMD64Instr()
3273 vreg2ireg(i->Ain.Sse64Fx2.src) )); in emit_AMD64Instr()
3275 switch (i->Ain.Sse64Fx2.op) { in emit_AMD64Instr()
3289 p = doAMode_R(p, vreg2ireg(i->Ain.Sse64Fx2.dst), in emit_AMD64Instr()
3290 vreg2ireg(i->Ain.Sse64Fx2.src) ); in emit_AMD64Instr()
3299 rexAMode_R( vreg2ireg(i->Ain.Sse32FLo.dst), in emit_AMD64Instr()
3300 vreg2ireg(i->Ain.Sse32FLo.src) )); in emit_AMD64Instr()
3302 switch (i->Ain.Sse32FLo.op) { in emit_AMD64Instr()
3318 p = doAMode_R(p, vreg2ireg(i->Ain.Sse32FLo.dst), in emit_AMD64Instr()
3319 vreg2ireg(i->Ain.Sse32FLo.src) ); in emit_AMD64Instr()
3328 rexAMode_R( vreg2ireg(i->Ain.Sse64FLo.dst), in emit_AMD64Instr()
3329 vreg2ireg(i->Ain.Sse64FLo.src) )); in emit_AMD64Instr()
3331 switch (i->Ain.Sse64FLo.op) { in emit_AMD64Instr()
3345 p = doAMode_R(p, vreg2ireg(i->Ain.Sse64FLo.dst), in emit_AMD64Instr()
3346 vreg2ireg(i->Ain.Sse64FLo.src) ); in emit_AMD64Instr()
3355 rexAMode_R( vreg2ireg(i->Ain.SseReRg.dst), in emit_AMD64Instr()
3356 vreg2ireg(i->Ain.SseReRg.src) )); in emit_AMD64Instr()
3358 switch (i->Ain.SseReRg.op) { in emit_AMD64Instr()
3416 p = doAMode_R(p, vreg2ireg(i->Ain.SseReRg.dst), in emit_AMD64Instr()
3417 vreg2ireg(i->Ain.SseReRg.src) ); in emit_AMD64Instr()
3423 *p++ = toUChar(0x70 + (i->Ain.SseCMov.cond ^ 1)); in emit_AMD64Instr()
3429 rexAMode_R( vreg2ireg(i->Ain.SseCMov.dst), in emit_AMD64Instr()
3430 vreg2ireg(i->Ain.SseCMov.src) )); in emit_AMD64Instr()
3433 p = doAMode_R(p, vreg2ireg(i->Ain.SseCMov.dst), in emit_AMD64Instr()
3434 vreg2ireg(i->Ain.SseCMov.src) ); in emit_AMD64Instr()
3443 rexAMode_R( vreg2ireg(i->Ain.SseShuf.dst), in emit_AMD64Instr()
3444 vreg2ireg(i->Ain.SseShuf.src) )); in emit_AMD64Instr()
3447 p = doAMode_R(p, vreg2ireg(i->Ain.SseShuf.dst), in emit_AMD64Instr()
3448 vreg2ireg(i->Ain.SseShuf.src) ); in emit_AMD64Instr()
3449 *p++ = (UChar)(i->Ain.SseShuf.order); in emit_AMD64Instr()
3483 rex = clearWBit(rexAMode_M(fake(1), i->Ain.EvCheck.amCounter)); in emit_AMD64Instr()
3486 p = doAMode_M(p, fake(1), i->Ain.EvCheck.amCounter); in emit_AMD64Instr()
3496 rex = clearWBit(rexAMode_M(fake(4), i->Ain.EvCheck.amFailAddr)); in emit_AMD64Instr()
3499 p = doAMode_M(p, fake(4), i->Ain.EvCheck.amFailAddr); in emit_AMD64Instr()
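
emit_AMD64Instr turns each Ain record into x86-64 bytes at the output pointer p. The Imm64 case (lines 2294-2306 above) shows the usual shape: pick a short encoding when the constant is small enough, otherwise emit a REX.W-prefixed movabsq with a full 64-bit immediate. A sketch of that case; the case label, the 0x41 REX.B byte and the closing goto are assumed, the rest follows the matched lines:

   case Ain_Imm64:
      if (i->Ain.Imm64.imm64 <= 0xFFFFFULL) {
         /* small constant: 32-bit mov, zero-extended by the hardware */
         if (1 & iregBit3(i->Ain.Imm64.dst))
            *p++ = 0x41;                                /* REX.B for r8..r15 -- assumed */
         *p++ = 0xB8 + iregBits210(i->Ain.Imm64.dst);
         p = emit32(p, (UInt)i->Ain.Imm64.imm64);
      } else {
         /* full 64-bit movabsq */
         *p++ = toUChar(0x48 + (1 & iregBit3(i->Ain.Imm64.dst)));
         *p++ = toUChar(0xB8 + iregBits210(i->Ain.Imm64.dst));
         p = emit64(p, i->Ain.Imm64.imm64);
      }
      goto done;                                        /* case epilogue assumed */
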