Lines matching refs:dst — V8 x64 Assembler (assembler-x64.h)
544 void popq(Register dst);
545 void popq(Operand dst);
552 void movb(Register dst, Operand src);
553 void movb(Register dst, Immediate imm);
554 void movb(Operand dst, Register src);
555 void movb(Operand dst, Immediate imm);
559 void movw(Register dst, Operand src);
560 void movw(Operand dst, Register src);
561 void movw(Operand dst, Immediate imm);
565 void movl(Operand dst, Label* src);
574 void movq_heap_number(Register dst, double value);
576 void movq_string(Register dst, const StringConstantBase* str);
580 void movq(Register dst, int64_t value) { movq(dst, Immediate64(value)); } in movq() argument
581 void movq(Register dst, uint64_t value) { in movq() argument
582 movq(dst, Immediate64(static_cast<int64_t>(value))); in movq()
586 void movq_imm64(Register dst, int64_t value);
588 void movsxbl(Register dst, Register src);
589 void movsxbl(Register dst, Operand src);
590 void movsxbq(Register dst, Register src);
591 void movsxbq(Register dst, Operand src);
592 void movsxwl(Register dst, Register src);
593 void movsxwl(Register dst, Operand src);
594 void movsxwq(Register dst, Register src);
595 void movsxwq(Register dst, Operand src);
596 void movsxlq(Register dst, Register src);
597 void movsxlq(Register dst, Operand src);
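The move and sign-extension overloads above take either a Register or an Operand on each side and encode the operand width in the mnemonic suffix (b/w/l/q). A minimal usage sketch, assuming a V8 build environment, `using namespace v8::internal;`, and an already-constructed Assembler named `masm` (the helper name and the memory operands are illustrative, not V8 code):

// Hypothetical helper exercising only signatures listed above.
void EmitMoveExamples(Assembler& masm) {
  masm.movq(rax, int64_t{42});               // movq(Register, int64_t): load a 64-bit immediate
  masm.movb(Operand(rbx, 0), Immediate(1));  // movb(Operand, Immediate): store one byte at [rbx]
  masm.movw(Operand(rbx, 2), rcx);           // movw(Operand, Register): store the low 16 bits of rcx
  masm.movsxbq(rdx, Operand(rbx, 0));        // movsxbq: sign-extend a byte in memory into rdx
  masm.movsxlq(rax, rax);                    // movsxlq: sign-extend eax into rax
}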
615 void cmovq(Condition cc, Register dst, Register src);
616 void cmovq(Condition cc, Register dst, Operand src);
617 void cmovl(Condition cc, Register dst, Register src);
618 void cmovl(Condition cc, Register dst, Operand src);
620 void cmpb(Register dst, Immediate src) { in cmpb() argument
621 immediate_arithmetic_op_8(0x7, dst, src); in cmpb()
626 void cmpb(Register dst, Register src) { arithmetic_op_8(0x3A, dst, src); } in cmpb() argument
628 void cmpb(Register dst, Operand src) { arithmetic_op_8(0x3A, dst, src); } in cmpb() argument
630 void cmpb(Operand dst, Register src) { arithmetic_op_8(0x38, src, dst); } in cmpb() argument
632 void cmpb(Operand dst, Immediate src) { in cmpb() argument
633 immediate_arithmetic_op_8(0x7, dst, src); in cmpb()
636 void cmpw(Operand dst, Immediate src) { in cmpw() argument
637 immediate_arithmetic_op_16(0x7, dst, src); in cmpw()
640 void cmpw(Register dst, Immediate src) { in cmpw() argument
641 immediate_arithmetic_op_16(0x7, dst, src); in cmpw()
644 void cmpw(Register dst, Operand src) { arithmetic_op_16(0x3B, dst, src); } in cmpw() argument
646 void cmpw(Register dst, Register src) { arithmetic_op_16(0x3B, dst, src); } in cmpw() argument
648 void cmpw(Operand dst, Register src) { arithmetic_op_16(0x39, src, dst); } in cmpw() argument
654 void andb(Register dst, Immediate src) { in andb() argument
655 immediate_arithmetic_op_8(0x4, dst, src); in andb()
658 void decb(Register dst);
659 void decb(Operand dst);
667 void xaddb(Operand dst, Register src);
668 void xaddw(Operand dst, Register src);
669 void xaddl(Operand dst, Register src);
670 void xaddq(Operand dst, Register src);
681 void cmpxchgb(Operand dst, Register src);
682 void cmpxchgw(Operand dst, Register src);
696 void instruction##l(Register dst, Immediate imm8) { \
697 shift(dst, imm8, subcode, kInt32Size); \
700 void instruction##q(Register dst, Immediate imm8) { \
701 shift(dst, imm8, subcode, kInt64Size); \
704 void instruction##l(Operand dst, Immediate imm8) { \
705 shift(dst, imm8, subcode, kInt32Size); \
708 void instruction##q(Operand dst, Immediate imm8) { \
709 shift(dst, imm8, subcode, kInt64Size); \
712 void instruction##l_cl(Register dst) { shift(dst, subcode, kInt32Size); } \
714 void instruction##q_cl(Register dst) { shift(dst, subcode, kInt64Size); } \
716 void instruction##l_cl(Operand dst) { shift(dst, subcode, kInt32Size); } \
718 void instruction##q_cl(Operand dst) { shift(dst, subcode, kInt64Size); }
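Lines 696–718 are the body of a shift-instruction macro: `instruction##l` / `instruction##q` token-pasting produces a 32-bit and a 64-bit member per mnemonic, each forwarding to a common shift() emitter with kInt32Size or kInt64Size. A stripped-down, standalone sketch of that pattern (MiniAssembler, DECLARE_SHIFT, and the constants here are invented for illustration; they are not V8 identifiers):

#include <cstdio>

constexpr int kInt32Size = 4;
constexpr int kInt64Size = 8;

struct MiniAssembler {
  // Stand-in for the real emitter: just report what would be encoded.
  void shift(const char* reg, int imm8, int subcode, int size) {
    std::printf("shift subcode=%d size=%d %s, %d\n", subcode, size, reg, imm8);
  }

#define DECLARE_SHIFT(instruction, subcode)           \
  void instruction##l(const char* dst, int imm8) {    \
    shift(dst, imm8, subcode, kInt32Size);            \
  }                                                   \
  void instruction##q(const char* dst, int imm8) {    \
    shift(dst, imm8, subcode, kInt64Size);            \
  }

  DECLARE_SHIFT(shl, 4)  // generates shll() and shlq() in this sketch
  DECLARE_SHIFT(sar, 7)  // generates sarl() and sarq() in this sketch
#undef DECLARE_SHIFT
};

int main() {
  MiniAssembler a;
  a.shll("eax", 3);  // 32-bit variant
  a.sarq("rax", 1);  // 64-bit variant
}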
723 void shld(Register dst, Register src);
726 void shrd(Register dst, Register src);
728 void store_rax(Address dst, RelocInfo::Mode mode);
731 void subb(Register dst, Immediate src) { in subb() argument
732 immediate_arithmetic_op_8(0x5, dst, src); in subb()
737 void testb(Register dst, Register src);
742 void testw(Register dst, Register src);
748 void bswapl(Register dst);
749 void bswapq(Register dst);
750 void btq(Operand dst, Register src);
751 void btsq(Operand dst, Register src);
752 void btsq(Register dst, Immediate imm8);
753 void btrq(Register dst, Immediate imm8);
754 void bsrq(Register dst, Register src);
755 void bsrq(Register dst, Operand src);
756 void bsrl(Register dst, Register src);
757 void bsrl(Register dst, Operand src);
758 void bsfq(Register dst, Register src);
759 void bsfq(Register dst, Operand src);
760 void bsfl(Register dst, Register src);
761 void bsfl(Register dst, Operand src);
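The compare, conditional-move, and bit-scan overloads follow the same Register/Operand/Immediate pattern. A usage sketch under the same assumptions as before (the condition name `equal` is assumed to be V8's x64 Condition spelling; the sequence itself is only illustrative):

// Hypothetical sequence: compare a byte in memory, conditionally move,
// then locate the lowest set bit. Only signatures listed above are used.
void EmitCompareExamples(Assembler& masm) {
  masm.cmpb(Operand(rbx, 0), Immediate(0));  // cmpb(Operand, Immediate)
  masm.cmovq(equal, rax, rcx);               // move rcx into rax if the compare set ZF
  masm.testb(rax, rax);                      // test the low byte of rax against itself
  masm.bsfq(rdx, rax);                       // index of the lowest set bit of rax
  masm.bswapl(rdx);                          // byte-swap the low 32 bits
}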
774 void pblendw(XMMRegister dst, Operand src, uint8_t mask);
775 void pblendw(XMMRegister dst, XMMRegister src, uint8_t mask);
776 void palignr(XMMRegister dst, Operand src, uint8_t mask);
777 void palignr(XMMRegister dst, XMMRegister src, uint8_t mask);
905 void ucomiss(XMMRegister dst, XMMRegister src);
906 void ucomiss(XMMRegister dst, Operand src);
907 void movaps(XMMRegister dst, XMMRegister src);
908 void movaps(XMMRegister dst, Operand src);
914 void movss(XMMRegister dst, XMMRegister src);
916 void movss(XMMRegister dst, Operand src);
917 void movss(Operand dst, XMMRegister src);
919 void movlps(XMMRegister dst, Operand src);
920 void movlps(Operand dst, XMMRegister src);
922 void movhps(XMMRegister dst, Operand src);
923 void movhps(Operand dst, XMMRegister src);
925 void shufps(XMMRegister dst, XMMRegister src, byte imm8);
927 void cvttss2si(Register dst, Operand src);
928 void cvttss2si(Register dst, XMMRegister src);
929 void cvtlsi2ss(XMMRegister dst, Operand src);
930 void cvtlsi2ss(XMMRegister dst, Register src);
932 void movmskps(Register dst, XMMRegister src);
934 void vinstr(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2,
936 void vinstr(byte op, XMMRegister dst, XMMRegister src1, Operand src2,
940 void vinstr(byte op, Reg1 dst, Reg2 src1, Op src2, SIMDPrefix pp,
944 void sse_instr(XMMRegister dst, XMMRegister src, byte escape, byte opcode);
945 void sse_instr(XMMRegister dst, Operand src, byte escape, byte opcode);
947 void instruction(XMMRegister dst, XMMRegister src) { \
948 sse_instr(dst, src, 0x##escape, 0x##opcode); \
950 void instruction(XMMRegister dst, Operand src) { \
951 sse_instr(dst, src, 0x##escape, 0x##opcode); \
959 void sse2_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape,
961 void sse2_instr(XMMRegister dst, Operand src, byte prefix, byte escape,
964 void instruction(XMMRegister dst, XMMRegister src) { \
965 sse2_instr(dst, src, 0x##prefix, 0x##escape, 0x##opcode); \
967 void instruction(XMMRegister dst, Operand src) { \
968 sse2_instr(dst, src, 0x##prefix, 0x##escape, 0x##opcode); \
993 void v##instruction(XMMRegister dst, XMMRegister src1, XMMRegister src2) { \
994 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0); \
996 void v##instruction(XMMRegister dst, XMMRegister src1, Operand src2) { \
997 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0); \
1002 void v##instruction(YMMRegister dst, YMMRegister src1, YMMRegister src2) { \
1003 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0, AVX); \
1005 void v##instruction(YMMRegister dst, YMMRegister src1, Operand src2) { \
1006 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0, AVX); \
1014 void v##instruction(YMMRegister dst, YMMRegister src1, YMMRegister src2) { \
1015 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0, AVX2); \
1017 void v##instruction(YMMRegister dst, YMMRegister src1, Operand src2) { \
1018 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0, AVX2); \
1027 void v##instruction(YMMRegister dst, YMMRegister src1, XMMRegister src2) { \
1028 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0, AVX2); \
1030 void v##instruction(YMMRegister dst, YMMRegister src1, Operand src2) { \
1031 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0, AVX2); \
1039 void v##instruction(XMMRegister dst, XMMRegister src) { \
1040 vpd(0x##opcode, dst, xmm0, src); \
1042 void v##instruction(XMMRegister dst, Operand src) { \
1043 vpd(0x##opcode, dst, xmm0, src); \
1050 void lddqu(XMMRegister dst, Operand src);
1051 void movddup(XMMRegister dst, Operand src);
1052 void movddup(XMMRegister dst, XMMRegister src);
1053 void movshdup(XMMRegister dst, XMMRegister src);
1056 void ssse3_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape1,
1058 void ssse3_instr(XMMRegister dst, Operand src, byte prefix, byte escape1,
1063 void instruction(XMMRegister dst, XMMRegister src) { \
1064 ssse3_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
1066 void instruction(XMMRegister dst, Operand src) { \
1067 ssse3_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
1075 void sse4_instr(Register dst, XMMRegister src, byte prefix, byte escape1,
1077 void sse4_instr(Operand dst, XMMRegister src, byte prefix, byte escape1,
1079 void sse4_instr(XMMRegister dst, Register src, byte prefix, byte escape1,
1081 void sse4_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape1,
1083 void sse4_instr(XMMRegister dst, Operand src, byte prefix, byte escape1,
1087 void instruction(XMMRegister dst, XMMRegister src) { \
1088 sse4_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
1090 void instruction(XMMRegister dst, Operand src) { \
1091 sse4_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
1103 void instruction(Register dst, XMMRegister src, uint8_t imm8) { \
1104 sse4_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode, \
1107 void instruction(Operand dst, XMMRegister src, uint8_t imm8) { \
1108 sse4_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode, \
1116 void sse4_2_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape1,
1118 void sse4_2_instr(XMMRegister dst, Operand src, byte prefix, byte escape1,
1122 void instruction(XMMRegister dst, XMMRegister src) { \
1123 sse4_2_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
1125 void instruction(XMMRegister dst, Operand src) { \
1126 sse4_2_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
1134 void v##instruction(XMMRegister dst, XMMRegister src1, XMMRegister src2) { \
1135 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape1##escape2, kW0); \
1137 void v##instruction(XMMRegister dst, XMMRegister src1, Operand src2) { \
1138 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape1##escape2, kW0); \
1140 void v##instruction(YMMRegister dst, YMMRegister src1, YMMRegister src2) { \
1141 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape1##escape2, kW0, \
1144 void v##instruction(YMMRegister dst, YMMRegister src1, Operand src2) { \
1145 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape1##escape2, kW0, \
1156 void v##instruction(XMMRegister dst, XMMRegister src) { \ in SSSE3_INSTRUCTION_LIST()
1157 vinstr(0x##opcode, dst, xmm0, src, k##prefix, k##escape1##escape2, kW0); \ in SSSE3_INSTRUCTION_LIST()
1159 void v##instruction(XMMRegister dst, Operand src) { \
1160 vinstr(0x##opcode, dst, xmm0, src, k##prefix, k##escape1##escape2, kW0); \
1162 void v##instruction(YMMRegister dst, YMMRegister src) { \
1163 vinstr(0x##opcode, dst, ymm0, src, k##prefix, k##escape1##escape2, kW0); \
1165 void v##instruction(YMMRegister dst, Operand src) { \
1166 vinstr(0x##opcode, dst, ymm0, src, k##prefix, k##escape1##escape2, kW0); \
1172 void vpblendvb(XMMRegister dst, XMMRegister src1, XMMRegister src2,
1174 vinstr(0x4C, dst, src1, src2, k66, k0F3A, kW0);
1178 void vpblendvb(YMMRegister dst, YMMRegister src1, YMMRegister src2, in vpblendvb() argument
1180 vinstr(0x4C, dst, src1, src2, k66, k0F3A, kW0, AVX2); in vpblendvb()
1185 void vblendvps(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vblendvps() argument
1187 vinstr(0x4A, dst, src1, src2, k66, k0F3A, kW0); in vblendvps()
1191 void vblendvps(YMMRegister dst, YMMRegister src1, YMMRegister src2, in vblendvps() argument
1193 vinstr(0x4A, dst, src1, src2, k66, k0F3A, kW0, AVX); in vblendvps()
1198 void vblendvpd(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vblendvpd() argument
1200 vinstr(0x4B, dst, src1, src2, k66, k0F3A, kW0); in vblendvpd()
1204 void vblendvpd(YMMRegister dst, YMMRegister src1, YMMRegister src2, in vblendvpd() argument
1206 vinstr(0x4B, dst, src1, src2, k66, k0F3A, kW0, AVX); in vblendvpd()
1213 void v##instruction(XMMRegister dst, XMMRegister src) { \
1214 vinstr(0x##opcode, dst, xmm0, src, k##prefix, k##escape1##escape2, kW0); \
1216 void v##instruction(XMMRegister dst, Operand src) { \
1217 vinstr(0x##opcode, dst, xmm0, src, k##prefix, k##escape1##escape2, kW0); \
1223 void v##instruction(Register dst, XMMRegister src, uint8_t imm8) { \
1224 XMMRegister idst = XMMRegister::from_code(dst.code()); \
1228 void v##instruction(Operand dst, XMMRegister src, uint8_t imm8) { \
1229 vinstr(0x##opcode, src, xmm0, dst, k##prefix, k##escape1##escape2, kW0); \
1236 void movd(XMMRegister dst, Register src);
1237 void movd(XMMRegister dst, Operand src);
1238 void movd(Register dst, XMMRegister src);
1239 void movq(XMMRegister dst, Register src);
1240 void movq(XMMRegister dst, Operand src);
1241 void movq(Register dst, XMMRegister src);
1242 void movq(XMMRegister dst, XMMRegister src);
1248 void movsd(XMMRegister dst, XMMRegister src);
1250 void movsd(Operand dst, XMMRegister src);
1251 void movsd(XMMRegister dst, Operand src);
1253 void movdqa(Operand dst, XMMRegister src);
1254 void movdqa(XMMRegister dst, Operand src);
1255 void movdqa(XMMRegister dst, XMMRegister src);
1257 void movdqu(Operand dst, XMMRegister src);
1258 void movdqu(XMMRegister dst, Operand src);
1259 void movdqu(XMMRegister dst, XMMRegister src);
1261 void movapd(XMMRegister dst, XMMRegister src);
1262 void movupd(XMMRegister dst, Operand src);
1263 void movupd(Operand dst, XMMRegister src);
1265 void cvtdq2pd(XMMRegister dst, XMMRegister src);
1267 void cvttsd2si(Register dst, Operand src);
1268 void cvttsd2si(Register dst, XMMRegister src);
1269 void cvttss2siq(Register dst, XMMRegister src);
1270 void cvttss2siq(Register dst, Operand src);
1271 void cvttsd2siq(Register dst, XMMRegister src);
1272 void cvttsd2siq(Register dst, Operand src);
1273 void cvttps2dq(XMMRegister dst, Operand src);
1274 void cvttps2dq(XMMRegister dst, XMMRegister src);
1276 void cvtlsi2sd(XMMRegister dst, Operand src);
1277 void cvtlsi2sd(XMMRegister dst, Register src);
1279 void cvtqsi2ss(XMMRegister dst, Operand src);
1280 void cvtqsi2ss(XMMRegister dst, Register src);
1282 void cvtqsi2sd(XMMRegister dst, Operand src);
1283 void cvtqsi2sd(XMMRegister dst, Register src);
1285 void cvtsd2si(Register dst, XMMRegister src);
1286 void cvtsd2siq(Register dst, XMMRegister src);
1288 void haddps(XMMRegister dst, XMMRegister src);
1289 void haddps(XMMRegister dst, Operand src);
1291 void cmpeqsd(XMMRegister dst, XMMRegister src);
1292 void cmpeqss(XMMRegister dst, XMMRegister src);
1293 void cmpltsd(XMMRegister dst, XMMRegister src);
1295 void movmskpd(Register dst, XMMRegister src);
1297 void pmovmskb(Register dst, XMMRegister src);
1299 void pinsrw(XMMRegister dst, Register src, uint8_t imm8);
1300 void pinsrw(XMMRegister dst, Operand src, uint8_t imm8);
1303 void insertps(XMMRegister dst, XMMRegister src, byte imm8);
1304 void insertps(XMMRegister dst, Operand src, byte imm8);
1305 void pextrq(Register dst, XMMRegister src, int8_t imm8);
1306 void pinsrb(XMMRegister dst, Register src, uint8_t imm8);
1307 void pinsrb(XMMRegister dst, Operand src, uint8_t imm8);
1308 void pinsrd(XMMRegister dst, Register src, uint8_t imm8);
1309 void pinsrd(XMMRegister dst, Operand src, uint8_t imm8);
1310 void pinsrq(XMMRegister dst, Register src, uint8_t imm8);
1311 void pinsrq(XMMRegister dst, Operand src, uint8_t imm8);
1313 void roundss(XMMRegister dst, XMMRegister src, RoundingMode mode);
1314 void roundss(XMMRegister dst, Operand src, RoundingMode mode);
1315 void roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode);
1316 void roundsd(XMMRegister dst, Operand src, RoundingMode mode);
1317 void roundps(XMMRegister dst, XMMRegister src, RoundingMode mode);
1318 void roundpd(XMMRegister dst, XMMRegister src, RoundingMode mode);
1320 void cmpps(XMMRegister dst, XMMRegister src, int8_t cmp);
1321 void cmpps(XMMRegister dst, Operand src, int8_t cmp);
1322 void cmppd(XMMRegister dst, XMMRegister src, int8_t cmp);
1323 void cmppd(XMMRegister dst, Operand src, int8_t cmp);
1326 void instr##ps(XMMRegister dst, XMMRegister src) { cmpps(dst, src, imm8); } \
1327 void instr##ps(XMMRegister dst, Operand src) { cmpps(dst, src, imm8); } \
1328 void instr##pd(XMMRegister dst, XMMRegister src) { cmppd(dst, src, imm8); } \
1329 void instr##pd(XMMRegister dst, Operand src) { cmppd(dst, src, imm8); }
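cmpps/cmppd take the comparison predicate as a trailing immediate, and the macro at lines 1326–1329 wraps common predicates into named pseudo-ops around them. A sketch using the raw forms (predicate encodings 0 = EQ and 1 = LT are the standard SSE values; `masm` assumed as before):

// Hypothetical packed-compare sequence using the immediate-predicate forms.
void EmitPackedCompareExamples(Assembler& masm) {
  masm.cmpps(xmm0, xmm1, 0);             // CMPEQPS: per-lane float equality mask
  masm.cmppd(xmm2, Operand(rbx, 0), 1);  // CMPLTPD: per-lane double less-than vs. memory
  masm.movmskps(rax, xmm0);              // collect the four lane sign bits into rax
}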
1341 void movups(XMMRegister dst, XMMRegister src);
1342 void movups(XMMRegister dst, Operand src);
1343 void movups(Operand dst, XMMRegister src);
1344 void psrldq(XMMRegister dst, uint8_t shift);
1345 void pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle);
1346 void pshufd(XMMRegister dst, Operand src, uint8_t shuffle);
1347 void pshufhw(XMMRegister dst, XMMRegister src, uint8_t shuffle);
1348 void pshufhw(XMMRegister dst, Operand src, uint8_t shuffle);
1349 void pshuflw(XMMRegister dst, XMMRegister src, uint8_t shuffle);
1350 void pshuflw(XMMRegister dst, Operand src, uint8_t shuffle);
1352 void movhlps(XMMRegister dst, XMMRegister src) { in movhlps() argument
1353 sse_instr(dst, src, 0x0F, 0x12); in movhlps()
1355 void movlhps(XMMRegister dst, XMMRegister src) { in movlhps() argument
1356 sse_instr(dst, src, 0x0F, 0x16); in movlhps()
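The scalar moves and conversions above are the usual SSE2 building blocks for double-to-integer traffic. A sketch with the same `masm` assumption (displacements are arbitrary):

// Hypothetical scalar-double round trip: load, truncate to int64, convert back.
void EmitScalarConvertExamples(Assembler& masm) {
  masm.movsd(xmm0, Operand(rbx, 0));   // load a double from [rbx]
  masm.cvttsd2siq(rax, xmm0);          // truncate it to a 64-bit integer
  masm.cvtqsi2sd(xmm1, rax);           // convert the integer back to double
  masm.movsd(Operand(rbx, 8), xmm1);   // store the result at [rbx + 8]
}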
1360 void vmovddup(XMMRegister dst, XMMRegister src);
1361 void vmovddup(XMMRegister dst, Operand src);
1362 void vmovddup(YMMRegister dst, YMMRegister src);
1363 void vmovddup(YMMRegister dst, Operand src);
1364 void vmovshdup(XMMRegister dst, XMMRegister src);
1365 void vmovshdup(YMMRegister dst, YMMRegister src);
1366 void vbroadcastss(XMMRegister dst, Operand src);
1367 void vbroadcastss(XMMRegister dst, XMMRegister src);
1368 void vbroadcastss(YMMRegister dst, Operand src);
1369 void vbroadcastss(YMMRegister dst, XMMRegister src);
1371 void fma_instr(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2,
1373 void fma_instr(byte op, XMMRegister dst, XMMRegister src1, Operand src2,
1377 void instr(XMMRegister dst, XMMRegister src1, XMMRegister src2) { \
1378 fma_instr(0x##opcode, dst, src1, src2, k##length, k##prefix, \
1381 void instr(XMMRegister dst, XMMRegister src1, Operand src2) { \
1382 fma_instr(0x##opcode, dst, src1, src2, k##length, k##prefix, \
1388 void vmovd(XMMRegister dst, Register src);
1389 void vmovd(XMMRegister dst, Operand src);
1390 void vmovd(Register dst, XMMRegister src);
1391 void vmovq(XMMRegister dst, Register src);
1392 void vmovq(XMMRegister dst, Operand src);
1393 void vmovq(Register dst, XMMRegister src);
1395 void vmovsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) { in vmovsd() argument
1396 vsd(0x10, dst, src1, src2); in vmovsd()
1398 void vmovsd(XMMRegister dst, Operand src) { vsd(0x10, dst, xmm0, src); } in vmovsd() argument
1399 void vmovsd(Operand dst, XMMRegister src) { vsd(0x11, src, xmm0, dst); } in vmovsd() argument
1400 void vmovdqa(XMMRegister dst, Operand src);
1401 void vmovdqa(XMMRegister dst, XMMRegister src);
1402 void vmovdqa(YMMRegister dst, Operand src);
1403 void vmovdqa(YMMRegister dst, YMMRegister src);
1404 void vmovdqu(XMMRegister dst, Operand src);
1405 void vmovdqu(Operand dst, XMMRegister src);
1406 void vmovdqu(XMMRegister dst, XMMRegister src);
1407 void vmovdqu(YMMRegister dst, Operand src);
1408 void vmovdqu(Operand dst, YMMRegister src);
1409 void vmovdqu(YMMRegister dst, YMMRegister src);
1411 void vmovlps(XMMRegister dst, XMMRegister src1, Operand src2);
1412 void vmovlps(Operand dst, XMMRegister src);
1414 void vmovhps(XMMRegister dst, XMMRegister src1, Operand src2);
1415 void vmovhps(Operand dst, XMMRegister src);
1418 void v##instr(XMMRegister dst, XMMRegister src2) { \
1419 vps(0x##opcode, dst, xmm0, src2); \
1421 void v##instr(XMMRegister dst, Operand src2) { \
1422 vps(0x##opcode, dst, xmm0, src2); \
1424 void v##instr(YMMRegister dst, YMMRegister src2) { \
1425 vps(0x##opcode, dst, ymm0, src2); \
1427 void v##instr(YMMRegister dst, Operand src2) { \
1428 vps(0x##opcode, dst, ymm0, src2); \
1434 void v##instr(XMMRegister dst, XMMRegister src1, XMMRegister src2) { \
1435 vps(0x##opcode, dst, src1, src2); \
1437 void v##instr(XMMRegister dst, XMMRegister src1, Operand src2) { \
1438 vps(0x##opcode, dst, src1, src2); \
1440 void v##instr(YMMRegister dst, YMMRegister src1, YMMRegister src2) { \
1441 vps(0x##opcode, dst, src1, src2); \
1443 void v##instr(YMMRegister dst, YMMRegister src1, Operand src2) { \
1444 vps(0x##opcode, dst, src1, src2); \
1450 void instr(SIMDRegister dst, SIMDRegister src1, SIMDRegister src2) { \ in SSE_BINOP_INSTRUCTION_LIST()
1451 impl(opcode, dst, src1, src2); \ in SSE_BINOP_INSTRUCTION_LIST()
1453 void instr(SIMDRegister dst, SIMDRegister src1, Operand src2) { \
1454 impl(opcode, dst, src1, src2); \
1461 void v##instr(XMMRegister dst, XMMRegister src1, XMMRegister src2) { \
1462 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kWIG); \
1464 void v##instr(XMMRegister dst, XMMRegister src1, Operand src2) { \
1465 vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kWIG); \
1474 void v##instr(XMMRegister dst, XMMRegister src, byte imm8) { \
1476 vinstr(0x##opcode, ext_reg, dst, src, k##prefix, k##escape, kWIG); \
1482 void vmovlhps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
1483 vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
1485 void vmovhlps(XMMRegister dst, XMMRegister src1, XMMRegister src2) { in vmovhlps() argument
1486 vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG); in vmovhlps()
1488 void vcvtdq2pd(XMMRegister dst, XMMRegister src) { in vcvtdq2pd() argument
1489 vinstr(0xe6, dst, xmm0, src, kF3, k0F, kWIG); in vcvtdq2pd()
1491 void vcvttps2dq(XMMRegister dst, XMMRegister src) { in vcvttps2dq() argument
1492 vinstr(0x5b, dst, xmm0, src, kF3, k0F, kWIG); in vcvttps2dq()
1494 void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, Register src2) { in vcvtlsi2sd() argument
1496 vinstr(0x2a, dst, src1, isrc2, kF2, k0F, kW0); in vcvtlsi2sd()
1498 void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, Operand src2) { in vcvtlsi2sd() argument
1499 vinstr(0x2a, dst, src1, src2, kF2, k0F, kW0); in vcvtlsi2sd()
1501 void vcvtlsi2ss(XMMRegister dst, XMMRegister src1, Register src2) { in vcvtlsi2ss() argument
1503 vinstr(0x2a, dst, src1, isrc2, kF3, k0F, kW0); in vcvtlsi2ss()
1505 void vcvtlsi2ss(XMMRegister dst, XMMRegister src1, Operand src2) { in vcvtlsi2ss() argument
1506 vinstr(0x2a, dst, src1, src2, kF3, k0F, kW0); in vcvtlsi2ss()
1508 void vcvtqsi2ss(XMMRegister dst, XMMRegister src1, Register src2) { in vcvtqsi2ss() argument
1510 vinstr(0x2a, dst, src1, isrc2, kF3, k0F, kW1); in vcvtqsi2ss()
1512 void vcvtqsi2ss(XMMRegister dst, XMMRegister src1, Operand src2) { in vcvtqsi2ss() argument
1513 vinstr(0x2a, dst, src1, src2, kF3, k0F, kW1); in vcvtqsi2ss()
1515 void vcvtqsi2sd(XMMRegister dst, XMMRegister src1, Register src2) { in vcvtqsi2sd() argument
1517 vinstr(0x2a, dst, src1, isrc2, kF2, k0F, kW1); in vcvtqsi2sd()
1519 void vcvtqsi2sd(XMMRegister dst, XMMRegister src1, Operand src2) { in vcvtqsi2sd() argument
1520 vinstr(0x2a, dst, src1, src2, kF2, k0F, kW1); in vcvtqsi2sd()
1522 void vcvttss2si(Register dst, XMMRegister src) { in vcvttss2si() argument
1523 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttss2si()
1526 void vcvttss2si(Register dst, Operand src) { in vcvttss2si() argument
1527 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttss2si()
1530 void vcvttsd2si(Register dst, XMMRegister src) { in vcvttsd2si() argument
1531 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttsd2si()
1534 void vcvttsd2si(Register dst, Operand src) { in vcvttsd2si() argument
1535 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttsd2si()
1538 void vcvttss2siq(Register dst, XMMRegister src) { in vcvttss2siq() argument
1539 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttss2siq()
1542 void vcvttss2siq(Register dst, Operand src) { in vcvttss2siq() argument
1543 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttss2siq()
1546 void vcvttsd2siq(Register dst, XMMRegister src) { in vcvttsd2siq() argument
1547 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttsd2siq()
1550 void vcvttsd2siq(Register dst, Operand src) { in vcvttsd2siq() argument
1551 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvttsd2siq()
1554 void vcvtsd2si(Register dst, XMMRegister src) { in vcvtsd2si() argument
1555 XMMRegister idst = XMMRegister::from_code(dst.code()); in vcvtsd2si()
1558 void vroundss(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vroundss() argument
1560 vinstr(0x0a, dst, src1, src2, k66, k0F3A, kWIG); in vroundss()
1563 void vroundss(XMMRegister dst, XMMRegister src1, Operand src2, in vroundss() argument
1565 vinstr(0x0a, dst, src1, src2, k66, k0F3A, kWIG); in vroundss()
1568 void vroundsd(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vroundsd() argument
1570 vinstr(0x0b, dst, src1, src2, k66, k0F3A, kWIG); in vroundsd()
1573 void vroundsd(XMMRegister dst, XMMRegister src1, Operand src2, in vroundsd() argument
1575 vinstr(0x0b, dst, src1, src2, k66, k0F3A, kWIG); in vroundsd()
1578 void vroundps(XMMRegister dst, XMMRegister src, RoundingMode mode) { in vroundps() argument
1579 vinstr(0x08, dst, xmm0, src, k66, k0F3A, kWIG); in vroundps()
1582 void vroundps(YMMRegister dst, YMMRegister src, RoundingMode mode) { in vroundps() argument
1583 vinstr(0x08, dst, ymm0, src, k66, k0F3A, kWIG, AVX); in vroundps()
1586 void vroundpd(XMMRegister dst, XMMRegister src, RoundingMode mode) { in vroundpd() argument
1587 vinstr(0x09, dst, xmm0, src, k66, k0F3A, kWIG); in vroundpd()
1590 void vroundpd(YMMRegister dst, YMMRegister src, RoundingMode mode) { in vroundpd() argument
1591 vinstr(0x09, dst, ymm0, src, k66, k0F3A, kWIG, AVX); in vroundpd()
1596 void vsd(byte op, Reg dst, Reg src1, Op src2) { in vsd() argument
1597 vinstr(op, dst, src1, src2, kF2, k0F, kWIG, AVX); in vsd()
1600 void vmovss(XMMRegister dst, XMMRegister src1, XMMRegister src2) { in vmovss() argument
1601 vss(0x10, dst, src1, src2); in vmovss()
1603 void vmovss(XMMRegister dst, Operand src) { vss(0x10, dst, xmm0, src); } in vmovss() argument
1604 void vmovss(Operand dst, XMMRegister src) { vss(0x11, src, xmm0, dst); } in vmovss() argument
1605 void vucomiss(XMMRegister dst, XMMRegister src);
1606 void vucomiss(XMMRegister dst, Operand src);
1607 void vss(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
1608 void vss(byte op, XMMRegister dst, XMMRegister src1, Operand src2);
1610 void vshufps(XMMRegister dst, XMMRegister src1, XMMRegister src2, byte imm8) { in vshufps() argument
1611 vps(0xC6, dst, src1, src2, imm8); in vshufps()
1613 void vshufps(YMMRegister dst, YMMRegister src1, YMMRegister src2, byte imm8) { in vshufps() argument
1614 vps(0xC6, dst, src1, src2, imm8); in vshufps()
1617 void vmovaps(XMMRegister dst, XMMRegister src) { vps(0x28, dst, xmm0, src); } in vmovaps() argument
1618 void vmovaps(YMMRegister dst, YMMRegister src) { vps(0x28, dst, ymm0, src); } in vmovaps() argument
1619 void vmovaps(XMMRegister dst, Operand src) { vps(0x28, dst, xmm0, src); } in vmovaps() argument
1620 void vmovaps(YMMRegister dst, Operand src) { vps(0x28, dst, ymm0, src); } in vmovaps() argument
1621 void vmovups(XMMRegister dst, XMMRegister src) { vps(0x10, dst, xmm0, src); } in vmovups() argument
1622 void vmovups(YMMRegister dst, YMMRegister src) { vps(0x10, dst, ymm0, src); } in vmovups() argument
1623 void vmovups(XMMRegister dst, Operand src) { vps(0x10, dst, xmm0, src); } in vmovups() argument
1624 void vmovups(YMMRegister dst, Operand src) { vps(0x10, dst, ymm0, src); } in vmovups() argument
1625 void vmovups(Operand dst, XMMRegister src) { vps(0x11, src, xmm0, dst); } in vmovups() argument
1626 void vmovups(Operand dst, YMMRegister src) { vps(0x11, src, ymm0, dst); } in vmovups() argument
1627 void vmovapd(XMMRegister dst, XMMRegister src) { vpd(0x28, dst, xmm0, src); } in vmovapd() argument
1628 void vmovapd(YMMRegister dst, YMMRegister src) { vpd(0x28, dst, ymm0, src); } in vmovapd() argument
1629 void vmovupd(XMMRegister dst, Operand src) { vpd(0x10, dst, xmm0, src); } in vmovupd() argument
1630 void vmovupd(YMMRegister dst, Operand src) { vpd(0x10, dst, ymm0, src); } in vmovupd() argument
1631 void vmovupd(Operand dst, XMMRegister src) { vpd(0x11, src, xmm0, dst); } in vmovupd() argument
1632 void vmovupd(Operand dst, YMMRegister src) { vpd(0x11, src, ymm0, dst); } in vmovupd() argument
1633 void vmovmskps(Register dst, XMMRegister src) { in vmovmskps() argument
1634 XMMRegister idst = XMMRegister::from_code(dst.code()); in vmovmskps()
1637 void vmovmskpd(Register dst, XMMRegister src) { in vmovmskpd() argument
1638 XMMRegister idst = XMMRegister::from_code(dst.code()); in vmovmskpd()
1641 void vpmovmskb(Register dst, XMMRegister src);
1642 void vcmpeqss(XMMRegister dst, XMMRegister src) { in vcmpeqss() argument
1643 vss(0xC2, dst, dst, src); in vcmpeqss()
1646 void vcmpeqsd(XMMRegister dst, XMMRegister src) { in vcmpeqsd() argument
1647 vsd(0xC2, dst, dst, src); in vcmpeqsd()
1650 void vcmpps(XMMRegister dst, XMMRegister src1, XMMRegister src2, int8_t cmp) { in vcmpps() argument
1651 vps(0xC2, dst, src1, src2); in vcmpps()
1654 void vcmpps(YMMRegister dst, YMMRegister src1, YMMRegister src2, int8_t cmp) { in vcmpps() argument
1655 vps(0xC2, dst, src1, src2); in vcmpps()
1658 void vcmpps(XMMRegister dst, XMMRegister src1, Operand src2, int8_t cmp) { in vcmpps() argument
1659 vps(0xC2, dst, src1, src2); in vcmpps()
1662 void vcmpps(YMMRegister dst, YMMRegister src1, Operand src2, int8_t cmp) { in vcmpps() argument
1663 vps(0xC2, dst, src1, src2); in vcmpps()
1666 void vcmppd(XMMRegister dst, XMMRegister src1, XMMRegister src2, int8_t cmp) { in vcmppd() argument
1667 vpd(0xC2, dst, src1, src2); in vcmppd()
1670 void vcmppd(YMMRegister dst, YMMRegister src1, YMMRegister src2, int8_t cmp) { in vcmppd() argument
1671 vpd(0xC2, dst, src1, src2); in vcmppd()
1674 void vcmppd(XMMRegister dst, XMMRegister src1, Operand src2, int8_t cmp) { in vcmppd() argument
1675 vpd(0xC2, dst, src1, src2); in vcmppd()
1678 void vcmppd(YMMRegister dst, YMMRegister src1, Operand src2, int8_t cmp) { in vcmppd() argument
1679 vpd(0xC2, dst, src1, src2); in vcmppd()
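The AVX overloads are non-destructive three-operand forms: the first register is the destination and the following one or two are sources, with vinstr selecting prefix, escape, and VEX.W. A sketch using entries listed above (guarding with a CpuFeatureScope for AVX, as done elsewhere in V8, is assumed to be the caller's job):

// Hypothetical AVX sequence; AVX availability and CpuFeatureScope are assumed.
void EmitAvxExamples(Assembler& masm) {
  CpuFeatureScope avx_scope(&masm, AVX);   // assumed guard
  masm.vmovaps(xmm0, Operand(rbx, 0));     // aligned 128-bit load
  masm.vshufps(xmm1, xmm0, xmm0, 0x1B);    // reverse the four float lanes of xmm0 into xmm1
  masm.vcmpps(xmm2, xmm1, xmm0, 0);        // predicate 0 = EQ: compare reversed vs. original
  masm.vmovups(Operand(rbx, 16), xmm2);    // unaligned store of the mask
}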
1683 void instr##ps(SIMDRegister dst, SIMDRegister src1, SIMDRegister src2) { \
1684 vcmpps(dst, src1, src2, imm8); \
1686 void instr##ps(SIMDRegister dst, SIMDRegister src1, Operand src2) { \
1687 vcmpps(dst, src1, src2, imm8); \
1689 void instr##pd(SIMDRegister dst, SIMDRegister src1, SIMDRegister src2) { \
1690 vcmppd(dst, src1, src2, imm8); \
1692 void instr##pd(SIMDRegister dst, SIMDRegister src1, Operand src2) { \
1693 vcmppd(dst, src1, src2, imm8); \
1715 void vlddqu(XMMRegister dst, Operand src) { in vlddqu() argument
1716 vinstr(0xF0, dst, xmm0, src, kF2, k0F, kWIG); in vlddqu()
1718 void vinsertps(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vinsertps() argument
1720 vinstr(0x21, dst, src1, src2, k66, k0F3A, kWIG); in vinsertps()
1723 void vinsertps(XMMRegister dst, XMMRegister src1, Operand src2, byte imm8) { in vinsertps() argument
1724 vinstr(0x21, dst, src1, src2, k66, k0F3A, kWIG); in vinsertps()
1727 void vpextrq(Register dst, XMMRegister src, int8_t imm8) { in vpextrq() argument
1728 XMMRegister idst = XMMRegister::from_code(dst.code()); in vpextrq()
1732 void vpinsrb(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8) { in vpinsrb() argument
1734 vinstr(0x20, dst, src1, isrc, k66, k0F3A, kW0); in vpinsrb()
1737 void vpinsrb(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8) { in vpinsrb() argument
1738 vinstr(0x20, dst, src1, src2, k66, k0F3A, kW0); in vpinsrb()
1741 void vpinsrw(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8) { in vpinsrw() argument
1743 vinstr(0xc4, dst, src1, isrc, k66, k0F, kW0); in vpinsrw()
1746 void vpinsrw(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8) { in vpinsrw() argument
1747 vinstr(0xc4, dst, src1, src2, k66, k0F, kW0); in vpinsrw()
1750 void vpinsrd(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8) { in vpinsrd() argument
1752 vinstr(0x22, dst, src1, isrc, k66, k0F3A, kW0); in vpinsrd()
1755 void vpinsrd(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8) { in vpinsrd() argument
1756 vinstr(0x22, dst, src1, src2, k66, k0F3A, kW0); in vpinsrd()
1759 void vpinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8) { in vpinsrq() argument
1761 vinstr(0x22, dst, src1, isrc, k66, k0F3A, kW1); in vpinsrq()
1764 void vpinsrq(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8) { in vpinsrq() argument
1765 vinstr(0x22, dst, src1, src2, k66, k0F3A, kW1); in vpinsrq()
1769 void vpshufd(XMMRegister dst, XMMRegister src, uint8_t imm8) { in vpshufd() argument
1770 vinstr(0x70, dst, xmm0, src, k66, k0F, kWIG); in vpshufd()
1773 void vpshufd(YMMRegister dst, YMMRegister src, uint8_t imm8) { in vpshufd() argument
1774 vinstr(0x70, dst, ymm0, src, k66, k0F, kWIG); in vpshufd()
1777 void vpshufd(XMMRegister dst, Operand src, uint8_t imm8) { in vpshufd() argument
1778 vinstr(0x70, dst, xmm0, src, k66, k0F, kWIG); in vpshufd()
1781 void vpshufd(YMMRegister dst, Operand src, uint8_t imm8) { in vpshufd() argument
1782 vinstr(0x70, dst, ymm0, src, k66, k0F, kWIG); in vpshufd()
1785 void vpshuflw(XMMRegister dst, XMMRegister src, uint8_t imm8) { in vpshuflw() argument
1786 vinstr(0x70, dst, xmm0, src, kF2, k0F, kWIG); in vpshuflw()
1789 void vpshuflw(YMMRegister dst, YMMRegister src, uint8_t imm8) { in vpshuflw() argument
1790 vinstr(0x70, dst, ymm0, src, kF2, k0F, kWIG); in vpshuflw()
1793 void vpshuflw(XMMRegister dst, Operand src, uint8_t imm8) { in vpshuflw() argument
1794 vinstr(0x70, dst, xmm0, src, kF2, k0F, kWIG); in vpshuflw()
1797 void vpshuflw(YMMRegister dst, Operand src, uint8_t imm8) { in vpshuflw() argument
1798 vinstr(0x70, dst, ymm0, src, kF2, k0F, kWIG); in vpshuflw()
1801 void vpshufhw(XMMRegister dst, XMMRegister src, uint8_t imm8) { in vpshufhw() argument
1802 vinstr(0x70, dst, xmm0, src, kF3, k0F, kWIG); in vpshufhw()
1805 void vpshufhw(YMMRegister dst, YMMRegister src, uint8_t imm8) { in vpshufhw() argument
1806 vinstr(0x70, dst, ymm0, src, kF3, k0F, kWIG); in vpshufhw()
1809 void vpshufhw(XMMRegister dst, Operand src, uint8_t imm8) { in vpshufhw() argument
1810 vinstr(0x70, dst, xmm0, src, kF3, k0F, kWIG); in vpshufhw()
1813 void vpshufhw(YMMRegister dst, Operand src, uint8_t imm8) { in vpshufhw() argument
1814 vinstr(0x70, dst, ymm0, src, kF3, k0F, kWIG); in vpshufhw()
1818 void vpblendw(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vpblendw() argument
1820 vinstr(0x0E, dst, src1, src2, k66, k0F3A, kWIG); in vpblendw()
1823 void vpblendw(YMMRegister dst, YMMRegister src1, YMMRegister src2, in vpblendw() argument
1825 vinstr(0x0E, dst, src1, src2, k66, k0F3A, kWIG); in vpblendw()
1828 void vpblendw(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t mask) { in vpblendw() argument
1829 vinstr(0x0E, dst, src1, src2, k66, k0F3A, kWIG); in vpblendw()
1832 void vpblendw(YMMRegister dst, YMMRegister src1, Operand src2, uint8_t mask) { in vpblendw() argument
1833 vinstr(0x0E, dst, src1, src2, k66, k0F3A, kWIG); in vpblendw()
1837 void vpalignr(XMMRegister dst, XMMRegister src1, XMMRegister src2, in vpalignr() argument
1839 vinstr(0x0F, dst, src1, src2, k66, k0F3A, kWIG); in vpalignr()
1842 void vpalignr(YMMRegister dst, YMMRegister src1, YMMRegister src2, in vpalignr() argument
1844 vinstr(0x0F, dst, src1, src2, k66, k0F3A, kWIG); in vpalignr()
1847 void vpalignr(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8) { in vpalignr() argument
1848 vinstr(0x0F, dst, src1, src2, k66, k0F3A, kWIG); in vpalignr()
1851 void vpalignr(YMMRegister dst, YMMRegister src1, Operand src2, uint8_t imm8) { in vpalignr() argument
1852 vinstr(0x0F, dst, src1, src2, k66, k0F3A, kWIG); in vpalignr()
1856 void vps(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
1857 void vps(byte op, YMMRegister dst, YMMRegister src1, YMMRegister src2);
1858 void vps(byte op, XMMRegister dst, XMMRegister src1, Operand src2);
1859 void vps(byte op, YMMRegister dst, YMMRegister src1, Operand src2);
1860 void vps(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2,
1862 void vps(byte op, YMMRegister dst, YMMRegister src1, YMMRegister src2,
1864 void vpd(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
1865 void vpd(byte op, YMMRegister dst, YMMRegister src1, YMMRegister src2);
1866 void vpd(byte op, XMMRegister dst, XMMRegister src1, Operand src2);
1867 void vpd(byte op, YMMRegister dst, YMMRegister src1, Operand src2);
1872 void instr(Reg dst, Op src) { \
1873 vinstr(0x##opcode, dst, xmm0, src, k##prefix, k##escape1##escape2, kW0, \
1880 void andnq(Register dst, Register src1, Register src2) { in AVX2_BROADCAST_LIST()
1881 bmi1q(0xf2, dst, src1, src2); in AVX2_BROADCAST_LIST()
1883 void andnq(Register dst, Register src1, Operand src2) { in andnq() argument
1884 bmi1q(0xf2, dst, src1, src2); in andnq()
1886 void andnl(Register dst, Register src1, Register src2) { in andnl() argument
1887 bmi1l(0xf2, dst, src1, src2); in andnl()
1889 void andnl(Register dst, Register src1, Operand src2) { in andnl() argument
1890 bmi1l(0xf2, dst, src1, src2); in andnl()
1892 void bextrq(Register dst, Register src1, Register src2) { in bextrq() argument
1893 bmi1q(0xf7, dst, src2, src1); in bextrq()
1895 void bextrq(Register dst, Operand src1, Register src2) { in bextrq() argument
1896 bmi1q(0xf7, dst, src2, src1); in bextrq()
1898 void bextrl(Register dst, Register src1, Register src2) { in bextrl() argument
1899 bmi1l(0xf7, dst, src2, src1); in bextrl()
1901 void bextrl(Register dst, Operand src1, Register src2) { in bextrl() argument
1902 bmi1l(0xf7, dst, src2, src1); in bextrl()
1904 void blsiq(Register dst, Register src) { bmi1q(0xf3, rbx, dst, src); } in blsiq() argument
1905 void blsiq(Register dst, Operand src) { bmi1q(0xf3, rbx, dst, src); } in blsiq() argument
1906 void blsil(Register dst, Register src) { bmi1l(0xf3, rbx, dst, src); } in blsil() argument
1907 void blsil(Register dst, Operand src) { bmi1l(0xf3, rbx, dst, src); } in blsil() argument
1908 void blsmskq(Register dst, Register src) { bmi1q(0xf3, rdx, dst, src); } in blsmskq() argument
1909 void blsmskq(Register dst, Operand src) { bmi1q(0xf3, rdx, dst, src); } in blsmskq() argument
1910 void blsmskl(Register dst, Register src) { bmi1l(0xf3, rdx, dst, src); } in blsmskl() argument
1911 void blsmskl(Register dst, Operand src) { bmi1l(0xf3, rdx, dst, src); } in blsmskl() argument
1912 void blsrq(Register dst, Register src) { bmi1q(0xf3, rcx, dst, src); } in blsrq() argument
1913 void blsrq(Register dst, Operand src) { bmi1q(0xf3, rcx, dst, src); } in blsrq() argument
1914 void blsrl(Register dst, Register src) { bmi1l(0xf3, rcx, dst, src); } in blsrl() argument
1915 void blsrl(Register dst, Operand src) { bmi1l(0xf3, rcx, dst, src); } in blsrl() argument
1916 void tzcntq(Register dst, Register src);
1917 void tzcntq(Register dst, Operand src);
1918 void tzcntl(Register dst, Register src);
1919 void tzcntl(Register dst, Operand src);
1921 void lzcntq(Register dst, Register src);
1922 void lzcntq(Register dst, Operand src);
1923 void lzcntl(Register dst, Register src);
1924 void lzcntl(Register dst, Operand src);
1926 void popcntq(Register dst, Register src);
1927 void popcntq(Register dst, Operand src);
1928 void popcntl(Register dst, Register src);
1929 void popcntl(Register dst, Operand src);
1931 void bzhiq(Register dst, Register src1, Register src2) { in bzhiq() argument
1932 bmi2q(kNoPrefix, 0xf5, dst, src2, src1); in bzhiq()
1934 void bzhiq(Register dst, Operand src1, Register src2) { in bzhiq() argument
1935 bmi2q(kNoPrefix, 0xf5, dst, src2, src1); in bzhiq()
1937 void bzhil(Register dst, Register src1, Register src2) { in bzhil() argument
1938 bmi2l(kNoPrefix, 0xf5, dst, src2, src1); in bzhil()
1940 void bzhil(Register dst, Operand src1, Register src2) { in bzhil() argument
1941 bmi2l(kNoPrefix, 0xf5, dst, src2, src1); in bzhil()
1955 void pdepq(Register dst, Register src1, Register src2) { in pdepq() argument
1956 bmi2q(kF2, 0xf5, dst, src1, src2); in pdepq()
1958 void pdepq(Register dst, Register src1, Operand src2) { in pdepq() argument
1959 bmi2q(kF2, 0xf5, dst, src1, src2); in pdepq()
1961 void pdepl(Register dst, Register src1, Register src2) { in pdepl() argument
1962 bmi2l(kF2, 0xf5, dst, src1, src2); in pdepl()
1964 void pdepl(Register dst, Register src1, Operand src2) { in pdepl() argument
1965 bmi2l(kF2, 0xf5, dst, src1, src2); in pdepl()
1967 void pextq(Register dst, Register src1, Register src2) { in pextq() argument
1968 bmi2q(kF3, 0xf5, dst, src1, src2); in pextq()
1970 void pextq(Register dst, Register src1, Operand src2) { in pextq() argument
1971 bmi2q(kF3, 0xf5, dst, src1, src2); in pextq()
1973 void pextl(Register dst, Register src1, Register src2) { in pextl() argument
1974 bmi2l(kF3, 0xf5, dst, src1, src2); in pextl()
1976 void pextl(Register dst, Register src1, Operand src2) { in pextl() argument
1977 bmi2l(kF3, 0xf5, dst, src1, src2); in pextl()
1979 void sarxq(Register dst, Register src1, Register src2) { in sarxq() argument
1980 bmi2q(kF3, 0xf7, dst, src2, src1); in sarxq()
1982 void sarxq(Register dst, Operand src1, Register src2) { in sarxq() argument
1983 bmi2q(kF3, 0xf7, dst, src2, src1); in sarxq()
1985 void sarxl(Register dst, Register src1, Register src2) { in sarxl() argument
1986 bmi2l(kF3, 0xf7, dst, src2, src1); in sarxl()
1988 void sarxl(Register dst, Operand src1, Register src2) { in sarxl() argument
1989 bmi2l(kF3, 0xf7, dst, src2, src1); in sarxl()
1991 void shlxq(Register dst, Register src1, Register src2) { in shlxq() argument
1992 bmi2q(k66, 0xf7, dst, src2, src1); in shlxq()
1994 void shlxq(Register dst, Operand src1, Register src2) { in shlxq() argument
1995 bmi2q(k66, 0xf7, dst, src2, src1); in shlxq()
1997 void shlxl(Register dst, Register src1, Register src2) { in shlxl() argument
1998 bmi2l(k66, 0xf7, dst, src2, src1); in shlxl()
2000 void shlxl(Register dst, Operand src1, Register src2) { in shlxl() argument
2001 bmi2l(k66, 0xf7, dst, src2, src1); in shlxl()
2003 void shrxq(Register dst, Register src1, Register src2) { in shrxq() argument
2004 bmi2q(kF2, 0xf7, dst, src2, src1); in shrxq()
2006 void shrxq(Register dst, Operand src1, Register src2) { in shrxq() argument
2007 bmi2q(kF2, 0xf7, dst, src2, src1); in shrxq()
2009 void shrxl(Register dst, Register src1, Register src2) { in shrxl() argument
2010 bmi2l(kF2, 0xf7, dst, src2, src1); in shrxl()
2012 void shrxl(Register dst, Operand src1, Register src2) { in shrxl() argument
2013 bmi2l(kF2, 0xf7, dst, src2, src1); in shrxl()
2015 void rorxq(Register dst, Register src, byte imm8);
2016 void rorxq(Register dst, Operand src, byte imm8);
2017 void rorxl(Register dst, Register src, byte imm8);
2018 void rorxl(Register dst, Operand src, byte imm8);
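The BMI1/BMI2 helpers above wrap VEX-encoded general-purpose instructions; for bextr/bzhi/sarx/shlx/shrx the wrappers hand the last two arguments to bmi1*/bmi2* in swapped order, as is visible in the listed bodies. A usage sketch (same `masm` assumption; BMI1/BMI2, LZCNT, and POPCNT support is assumed to be guarded by the caller):

// Hypothetical BMI sequence using only signatures listed above.
void EmitBmiExamples(Assembler& masm) {
  masm.tzcntq(rax, rbx);               // count trailing zero bits of rbx
  masm.popcntl(rcx, Operand(rdx, 0));  // population count of a 32-bit value in memory
  masm.andnq(rax, rbx, rcx);           // rax = ~rbx & rcx
  masm.shrxq(rdx, rax, rcx);           // rdx = rax >> (rcx & 63), flags untouched
}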
2271 void emit_sse_operand(XMMRegister dst, XMMRegister src);
2274 void emit_sse_operand(XMMRegister dst, Register src);
2275 void emit_sse_operand(Register dst, XMMRegister src);
2276 void emit_sse_operand(XMMRegister dst);
2290 void immediate_arithmetic_op_8(byte subcode, Register dst, Immediate src);
2291 void immediate_arithmetic_op_8(byte subcode, Operand dst, Immediate src);
2293 void immediate_arithmetic_op_16(byte subcode, Register dst, Immediate src);
2294 void immediate_arithmetic_op_16(byte subcode, Operand dst, Immediate src);
2296 void immediate_arithmetic_op(byte subcode, Register dst, Immediate src,
2298 void immediate_arithmetic_op(byte subcode, Operand dst, Immediate src,
2302 void shift(Operand dst, Immediate shift_amount, int subcode, int size);
2303 void shift(Register dst, Immediate shift_amount, int subcode, int size);
2305 void shift(Register dst, int subcode, int size);
2306 void shift(Operand dst, int subcode, int size);
2318 void emit_add(Register dst, Register src, int size) { in emit_add() argument
2319 arithmetic_op(0x03, dst, src, size); in emit_add()
2322 void emit_add(Register dst, Immediate src, int size) { in emit_add() argument
2323 immediate_arithmetic_op(0x0, dst, src, size); in emit_add()
2326 void emit_add(Register dst, Operand src, int size) { in emit_add() argument
2327 arithmetic_op(0x03, dst, src, size); in emit_add()
2330 void emit_add(Operand dst, Register src, int size) { in emit_add() argument
2331 arithmetic_op(0x1, src, dst, size); in emit_add()
2334 void emit_add(Operand dst, Immediate src, int size) { in emit_add() argument
2335 immediate_arithmetic_op(0x0, dst, src, size); in emit_add()
2338 void emit_and(Register dst, Register src, int size) { in emit_and() argument
2339 arithmetic_op(0x23, dst, src, size); in emit_and()
2342 void emit_and(Register dst, Operand src, int size) { in emit_and() argument
2343 arithmetic_op(0x23, dst, src, size); in emit_and()
2346 void emit_and(Operand dst, Register src, int size) { in emit_and() argument
2347 arithmetic_op(0x21, src, dst, size); in emit_and()
2350 void emit_and(Register dst, Immediate src, int size) { in emit_and() argument
2351 immediate_arithmetic_op(0x4, dst, src, size); in emit_and()
2354 void emit_and(Operand dst, Immediate src, int size) { in emit_and() argument
2355 immediate_arithmetic_op(0x4, dst, src, size); in emit_and()
2358 void emit_cmp(Register dst, Register src, int size) { in emit_cmp() argument
2359 arithmetic_op(0x3B, dst, src, size); in emit_cmp()
2362 void emit_cmp(Register dst, Operand src, int size) { in emit_cmp() argument
2363 arithmetic_op(0x3B, dst, src, size); in emit_cmp()
2366 void emit_cmp(Operand dst, Register src, int size) { in emit_cmp() argument
2367 arithmetic_op(0x39, src, dst, size); in emit_cmp()
2370 void emit_cmp(Register dst, Immediate src, int size) { in emit_cmp() argument
2371 immediate_arithmetic_op(0x7, dst, src, size); in emit_cmp()
2374 void emit_cmp(Operand dst, Immediate src, int size) { in emit_cmp() argument
2375 immediate_arithmetic_op(0x7, dst, src, size); in emit_cmp()
2381 void emit_cmpxchg(Operand dst, Register src, int size);
2383 void emit_dec(Register dst, int size);
2384 void emit_dec(Operand dst, int size);
2396 void emit_imul(Register dst, Register src, int size);
2397 void emit_imul(Register dst, Operand src, int size);
2398 void emit_imul(Register dst, Register src, Immediate imm, int size);
2399 void emit_imul(Register dst, Operand src, Immediate imm, int size);
2401 void emit_inc(Register dst, int size);
2402 void emit_inc(Operand dst, int size);
2404 void emit_lea(Register dst, Operand src, int size);
2406 void emit_mov(Register dst, Operand src, int size);
2407 void emit_mov(Register dst, Register src, int size);
2408 void emit_mov(Operand dst, Register src, int size);
2409 void emit_mov(Register dst, Immediate value, int size);
2410 void emit_mov(Operand dst, Immediate value, int size);
2411 void emit_mov(Register dst, Immediate64 value, int size);
2413 void emit_movzxb(Register dst, Operand src, int size);
2414 void emit_movzxb(Register dst, Register src, int size);
2415 void emit_movzxw(Register dst, Operand src, int size);
2416 void emit_movzxw(Register dst, Register src, int size);
2418 void emit_neg(Register dst, int size);
2419 void emit_neg(Operand dst, int size);
2421 void emit_not(Register dst, int size);
2422 void emit_not(Operand dst, int size);
2424 void emit_or(Register dst, Register src, int size) { in emit_or() argument
2425 arithmetic_op(0x0B, dst, src, size); in emit_or()
2428 void emit_or(Register dst, Operand src, int size) { in emit_or() argument
2429 arithmetic_op(0x0B, dst, src, size); in emit_or()
2432 void emit_or(Operand dst, Register src, int size) { in emit_or() argument
2433 arithmetic_op(0x9, src, dst, size); in emit_or()
2436 void emit_or(Register dst, Immediate src, int size) { in emit_or() argument
2437 immediate_arithmetic_op(0x1, dst, src, size); in emit_or()
2440 void emit_or(Operand dst, Immediate src, int size) { in emit_or() argument
2441 immediate_arithmetic_op(0x1, dst, src, size); in emit_or()
2446 void emit_sbb(Register dst, Register src, int size) { in emit_sbb() argument
2447 arithmetic_op(0x1b, dst, src, size); in emit_sbb()
2450 void emit_sub(Register dst, Register src, int size) { in emit_sub() argument
2451 arithmetic_op(0x2B, dst, src, size); in emit_sub()
2454 void emit_sub(Register dst, Immediate src, int size) { in emit_sub() argument
2455 immediate_arithmetic_op(0x5, dst, src, size); in emit_sub()
2458 void emit_sub(Register dst, Operand src, int size) { in emit_sub() argument
2459 arithmetic_op(0x2B, dst, src, size); in emit_sub()
2462 void emit_sub(Operand dst, Register src, int size) { in emit_sub() argument
2463 arithmetic_op(0x29, src, dst, size); in emit_sub()
2466 void emit_sub(Operand dst, Immediate src, int size) { in emit_sub() argument
2467 immediate_arithmetic_op(0x5, dst, src, size); in emit_sub()
2470 void emit_test(Register dst, Register src, int size);
2478 void emit_xchg(Register dst, Register src, int size);
2479 void emit_xchg(Register dst, Operand src, int size);
2481 void emit_xor(Register dst, Register src, int size) { in emit_xor() argument
2482 if (size == kInt64Size && dst.code() == src.code()) { in emit_xor()
2485 arithmetic_op(0x33, dst, src, kInt32Size); in emit_xor()
2487 arithmetic_op(0x33, dst, src, size); in emit_xor()
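The emit_xor(Register, Register, int size) body above special-cases size == kInt64Size with dst.code() == src.code(): xoring a register with itself always yields zero, and a 32-bit write already zero-extends to the full 64-bit register, so the narrower encoding (no REX.W) is emitted. A standalone model of that decision (names and types here are invented for illustration, not V8 code):

constexpr int kInt32Size = 4;
constexpr int kInt64Size = 8;

// Returns the operand size that should actually be encoded for `xor dst, src`.
int XorEncodedSize(int dst_code, int src_code, int requested_size) {
  if (requested_size == kInt64Size && dst_code == src_code) {
    // xor rax, rax == xor eax, eax: the result is zero either way, and the
    // 32-bit form zero-extends, so dropping REX.W saves a prefix byte.
    return kInt32Size;
  }
  return requested_size;
}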
2491 void emit_xor(Register dst, Operand src, int size) { in emit_xor() argument
2492 arithmetic_op(0x33, dst, src, size); in emit_xor()
2495 void emit_xor(Register dst, Immediate src, int size) { in emit_xor() argument
2496 immediate_arithmetic_op(0x6, dst, src, size); in emit_xor()
2499 void emit_xor(Operand dst, Immediate src, int size) { in emit_xor() argument
2500 immediate_arithmetic_op(0x6, dst, src, size); in emit_xor()
2503 void emit_xor(Operand dst, Register src, int size) { in emit_xor() argument
2504 arithmetic_op(0x31, src, dst, size); in emit_xor()
2552 void Assembler::vinstr(byte op, YMMRegister dst, YMMRegister src1,
2556 void Assembler::vinstr(byte op, YMMRegister dst, XMMRegister src1,
2560 void Assembler::vinstr(byte op, YMMRegister dst, YMMRegister src1,
2564 void Assembler::vinstr(byte op, YMMRegister dst, YMMRegister src1,
2568 void Assembler::vinstr(byte op, YMMRegister dst, XMMRegister src1,
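The trailing Assembler::vinstr overloads extend the same VEX emitter to YMMRegister operands, which is what backs the 256-bit forms listed earlier. A closing sketch touching a few of those YMM entry points (assumptions as before; AVX/AVX2 guarding left to the caller):

// Hypothetical 256-bit sequence using YMM overloads listed above.
void EmitYmmExamples(Assembler& masm) {
  masm.vmovdqu(ymm0, Operand(rbx, 0));       // unaligned 256-bit load
  masm.vbroadcastss(ymm1, Operand(rcx, 0));  // splat one float across all eight lanes
  masm.vshufps(ymm2, ymm0, ymm1, 0x00);      // per-128-bit-lane shuffle of the two sources
  masm.vmovdqu(Operand(rbx, 32), ymm2);      // store the result
}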