Lines Matching refs:pfx

683 static Bool IS_VALID_PFX ( Prefix pfx ) {  in IS_VALID_PFX()  argument
684 return toBool((pfx & 0xFF000000) == PFX_EMPTY); in IS_VALID_PFX()
687 static Bool haveREX ( Prefix pfx ) { in haveREX() argument
688 return toBool(pfx & PFX_REX); in haveREX()
691 static Int getRexW ( Prefix pfx ) { in getRexW() argument
692 return (pfx & PFX_REXW) ? 1 : 0; in getRexW()
694 static Int getRexR ( Prefix pfx ) { in getRexR() argument
695 return (pfx & PFX_REXR) ? 1 : 0; in getRexR()
697 static Int getRexX ( Prefix pfx ) { in getRexX() argument
698 return (pfx & PFX_REXX) ? 1 : 0; in getRexX()
700 static Int getRexB ( Prefix pfx ) { in getRexB() argument
701 return (pfx & PFX_REXB) ? 1 : 0; in getRexB()
706 static Bool haveF2orF3 ( Prefix pfx ) { in haveF2orF3() argument
707 return toBool((pfx & (PFX_F2|PFX_F3)) > 0); in haveF2orF3()
709 static Bool haveF2andF3 ( Prefix pfx ) { in haveF2andF3() argument
710 return toBool((pfx & (PFX_F2|PFX_F3)) == (PFX_F2|PFX_F3)); in haveF2andF3()
712 static Bool haveF2 ( Prefix pfx ) { in haveF2() argument
713 return toBool((pfx & PFX_F2) > 0); in haveF2()
715 static Bool haveF3 ( Prefix pfx ) { in haveF3() argument
716 return toBool((pfx & PFX_F3) > 0); in haveF3()
719 static Bool have66 ( Prefix pfx ) { in have66() argument
720 return toBool((pfx & PFX_66) > 0); in have66()
722 static Bool haveASO ( Prefix pfx ) { in haveASO() argument
723 return toBool((pfx & PFX_ASO) > 0); in haveASO()
725 static Bool haveLOCK ( Prefix pfx ) { in haveLOCK() argument
726 return toBool((pfx & PFX_LOCK) > 0); in haveLOCK()
730 static Bool have66noF2noF3 ( Prefix pfx ) in have66noF2noF3() argument
733 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66); in have66noF2noF3()
737 static Bool haveF2no66noF3 ( Prefix pfx ) in haveF2no66noF3() argument
740 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F2); in haveF2no66noF3()
744 static Bool haveF3no66noF2 ( Prefix pfx ) in haveF3no66noF2() argument
747 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F3); in haveF3no66noF2()
751 static Bool haveF3noF2 ( Prefix pfx ) in haveF3noF2() argument
754 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F3); in haveF3noF2()
758 static Bool haveF2noF3 ( Prefix pfx ) in haveF2noF3() argument
761 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F2); in haveF2noF3()
765 static Bool haveNo66noF2noF3 ( Prefix pfx ) in haveNo66noF2noF3() argument
768 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == 0); in haveNo66noF2noF3()
772 static Bool have66orF2orF3 ( Prefix pfx ) in have66orF2orF3() argument
774 return toBool( ! haveNo66noF2noF3(pfx) ); in have66orF2orF3()
778 static Bool have66orF3 ( Prefix pfx ) in have66orF3() argument
780 return toBool((pfx & (PFX_66|PFX_F3)) > 0); in have66orF3()
791 static UInt getVexNvvvv ( Prefix pfx ) { in getVexNvvvv() argument
792 UInt r = (UInt)pfx; in getVexNvvvv()
797 static Bool haveVEX ( Prefix pfx ) { in haveVEX() argument
798 return toBool(pfx & PFX_VEX); in haveVEX()
801 static Int getVexL ( Prefix pfx ) { in getVexL() argument
802 return (pfx & PFX_VEXL) ? 1 : 0; in getVexL()
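
The matches above (IS_VALID_PFX through getVexL) all test single bits or small bit groups in a packed Prefix word. The following standalone sketch, not the decoder's own definitions, shows the pattern; the PFX_* bit positions here are illustrative placeholders, not the values used in this file.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Standalone sketch of a packed instruction-prefix word.  The bit
       assignments are illustrative placeholders, not the PFX_* values
       defined in the decoder itself. */
    typedef uint32_t Pfx;

    #define PFX_66   (1u << 0)   /* operand-size override      */
    #define PFX_F2   (1u << 1)   /* REPNE / scalar-double hint */
    #define PFX_F3   (1u << 2)   /* REP   / scalar-single hint */
    #define PFX_REXW (1u << 3)   /* REX.W: 64-bit operand size */

    /* Single-bit predicates: one mask, one test. */
    static bool haveF2 (Pfx p) { return (p & PFX_F2) != 0; }
    static bool haveF3 (Pfx p) { return (p & PFX_F3) != 0; }

    /* Combination predicates mask out the whole group and compare, so
       "66 present, F2/F3 absent" is a single equality check. */
    static bool have66noF2noF3 (Pfx p) {
       return (p & (PFX_66 | PFX_F2 | PFX_F3)) == PFX_66;
    }

    int main(void) {
       Pfx p = PFX_66 | PFX_REXW;
       printf("haveF2=%d have66noF2noF3=%d\n", haveF2(p), have66noF2noF3(p));
       return 0;
    }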
1164 static IRExpr* getIReg64rexX ( Prefix pfx, UInt lo3bits ) in getIReg64rexX() argument
1167 vassert(IS_VALID_PFX(pfx)); in getIReg64rexX()
1168 return getIReg64( lo3bits | (getRexX(pfx) << 3) ); in getIReg64rexX()
1171 static const HChar* nameIReg64rexX ( Prefix pfx, UInt lo3bits ) in nameIReg64rexX() argument
1174 vassert(IS_VALID_PFX(pfx)); in nameIReg64rexX()
1175 return nameIReg( 8, lo3bits | (getRexX(pfx) << 3), False ); in nameIReg64rexX()
1178 static const HChar* nameIRegRexB ( Int sz, Prefix pfx, UInt lo3bits ) in nameIRegRexB() argument
1181 vassert(IS_VALID_PFX(pfx)); in nameIRegRexB()
1183 return nameIReg( sz, lo3bits | (getRexB(pfx) << 3), in nameIRegRexB()
1184 toBool(sz==1 && !haveREX(pfx)) ); in nameIRegRexB()
1187 static IRExpr* getIRegRexB ( Int sz, Prefix pfx, UInt lo3bits ) in getIRegRexB() argument
1190 vassert(IS_VALID_PFX(pfx)); in getIRegRexB()
1196 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3), in getIRegRexB()
1203 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3), in getIRegRexB()
1204 toBool(sz==1 && !haveREX(pfx)) ), in getIRegRexB()
1210 static void putIRegRexB ( Int sz, Prefix pfx, UInt lo3bits, IRExpr* e ) in putIRegRexB() argument
1213 vassert(IS_VALID_PFX(pfx)); in putIRegRexB()
1217 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3), in putIRegRexB()
1218 toBool(sz==1 && !haveREX(pfx)) ), in putIRegRexB()
1232 static UInt gregOfRexRM ( Prefix pfx, UChar mod_reg_rm ) in gregOfRexRM() argument
1235 reg += (pfx & PFX_REXR) ? 8 : 0; in gregOfRexRM()
1244 static UInt eregOfRexRM ( Prefix pfx, UChar mod_reg_rm ) in eregOfRexRM() argument
1249 rm += (pfx & PFX_REXB) ? 8 : 0; in eregOfRexRM()
1260 static UInt offsetIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm ) in offsetIRegG() argument
1264 vassert(IS_VALID_PFX(pfx)); in offsetIRegG()
1266 reg = gregOfRexRM( pfx, mod_reg_rm ); in offsetIRegG()
1267 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) ); in offsetIRegG()
1271 IRExpr* getIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm ) in getIRegG() argument
1276 IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ), in getIRegG()
1279 return IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ), in getIRegG()
1285 void putIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e ) in putIRegG() argument
1291 stmt( IRStmt_Put( offsetIRegG( sz, pfx, mod_reg_rm ), e ) ); in putIRegG()
1295 const HChar* nameIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm ) in nameIRegG() argument
1297 return nameIReg( sz, gregOfRexRM(pfx,mod_reg_rm), in nameIRegG()
1298 toBool(sz==1 && !haveREX(pfx)) ); in nameIRegG()
1303 IRExpr* getIRegV ( Int sz, Prefix pfx ) in getIRegV() argument
1308 IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ), in getIRegV()
1311 return IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ), in getIRegV()
1317 void putIRegV ( Int sz, Prefix pfx, IRExpr* e ) in putIRegV() argument
1323 stmt( IRStmt_Put( offsetIReg( sz, getVexNvvvv(pfx), False ), e ) ); in putIRegV()
1327 const HChar* nameIRegV ( Int sz, Prefix pfx ) in nameIRegV() argument
1329 return nameIReg( sz, getVexNvvvv(pfx), False ); in nameIRegV()
1339 static UInt offsetIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm ) in offsetIRegE() argument
1343 vassert(IS_VALID_PFX(pfx)); in offsetIRegE()
1345 reg = eregOfRexRM( pfx, mod_reg_rm ); in offsetIRegE()
1346 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) ); in offsetIRegE()
1350 IRExpr* getIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm ) in getIRegE() argument
1355 IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ), in getIRegE()
1358 return IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ), in getIRegE()
1364 void putIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e ) in putIRegE() argument
1370 stmt( IRStmt_Put( offsetIRegE( sz, pfx, mod_reg_rm ), e ) ); in putIRegE()
1374 const HChar* nameIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm ) in nameIRegE() argument
1376 return nameIReg( sz, eregOfRexRM(pfx,mod_reg_rm), in nameIRegE()
1377 toBool(sz==1 && !haveREX(pfx)) ); in nameIRegE()
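
The register-access matches above (getIReg64rexX, getIRegRexB, gregOfRexRM, eregOfRexRM, offsetIRegG/E and friends) all build a 4-bit register number by placing one REX bit on top of a 3-bit ModRM/SIB field. A small sketch of that arithmetic follows; the helper names and the ModRM extraction are written from scratch for illustration.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative only: extract the reg and rm fields of a ModRM byte
       and extend them with the REX.R / REX.B bits to 4-bit register
       numbers, mirroring the gregOfRexRM/eregOfRexRM matches above. */
    static unsigned gregOfModRM (uint8_t modrm, int rexR) {
       unsigned reg = (modrm >> 3) & 7;     /* bits 5..3: reg field */
       return reg | ((unsigned)rexR << 3);  /* REX.R supplies bit 3 */
    }

    static unsigned eregOfModRM (uint8_t modrm, int rexB) {
       unsigned rm = modrm & 7;             /* bits 2..0: r/m field */
       return rm | ((unsigned)rexB << 3);   /* REX.B supplies bit 3 */
    }

    int main(void) {
       /* modrm 0xC1: mod=3, reg=0 (rax), rm=1 (rcx); with REX.B set,
          the rm field selects r9 instead of rcx. */
       printf("greg=%u ereg=%u\n", gregOfModRM(0xC1, 0), eregOfModRM(0xC1, 1));
       return 0;
    }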
2371 const HChar* segRegTxt ( Prefix pfx ) in segRegTxt() argument
2373 if (pfx & PFX_CS) return "%cs:"; in segRegTxt()
2374 if (pfx & PFX_DS) return "%ds:"; in segRegTxt()
2375 if (pfx & PFX_ES) return "%es:"; in segRegTxt()
2376 if (pfx & PFX_FS) return "%fs:"; in segRegTxt()
2377 if (pfx & PFX_GS) return "%gs:"; in segRegTxt()
2378 if (pfx & PFX_SS) return "%ss:"; in segRegTxt()
2389 Prefix pfx, IRExpr* virtual ) in handleAddrOverrides() argument
2392 if (haveASO(pfx)) in handleAddrOverrides()
2400 if (pfx & PFX_FS) { in handleAddrOverrides()
2410 if (pfx & PFX_GS) { in handleAddrOverrides()
2513 const VexAbiInfo* vbi, Prefix pfx, Long delta, in disAMode() argument
2537 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm)); in disAMode()
2540 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,rm))); in disAMode()
2551 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm)); in disAMode()
2553 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm)); in disAMode()
2557 handleAddrOverrides(vbi, pfx, in disAMode()
2558 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d)))); in disAMode()
2568 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm)); in disAMode()
2571 handleAddrOverrides(vbi, pfx, in disAMode()
2572 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d)))); in disAMode()
2586 DIS(buf, "%s%lld(%%rip)", segRegTxt(pfx), d); in disAMode()
2596 handleAddrOverrides(vbi, pfx, in disAMode()
2624 Bool index_is_SP = toBool(index_r == R_RSP && 0==getRexX(pfx)); in disAMode()
2629 DIS(buf, "%s(%s,%s)", segRegTxt(pfx), in disAMode()
2630 nameIRegRexB(8,pfx,base_r), in disAMode()
2631 nameIReg64rexX(pfx,index_r)); in disAMode()
2633 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx), in disAMode()
2634 nameIRegRexB(8,pfx,base_r), in disAMode()
2635 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2640 handleAddrOverrides(vbi, pfx, in disAMode()
2642 getIRegRexB(8,pfx,base_r), in disAMode()
2643 binop(Iop_Shl64, getIReg64rexX(pfx,index_r), in disAMode()
2649 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d, in disAMode()
2650 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2654 handleAddrOverrides(vbi, pfx, in disAMode()
2656 binop(Iop_Shl64, getIReg64rexX(pfx,index_r), in disAMode()
2662 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,base_r)); in disAMode()
2665 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,base_r))); in disAMode()
2670 DIS(buf, "%s%lld", segRegTxt(pfx), d); in disAMode()
2673 handleAddrOverrides(vbi, pfx, mkU64(d))); in disAMode()
2695 if (index_r == R_RSP && 0==getRexX(pfx)) { in disAMode()
2696 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), in disAMode()
2697 d, nameIRegRexB(8,pfx,base_r)); in disAMode()
2700 handleAddrOverrides(vbi, pfx, in disAMode()
2701 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) )); in disAMode()
2704 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d, in disAMode()
2705 nameIRegRexB(8,pfx,base_r), in disAMode()
2706 nameIReg64rexX(pfx,index_r)); in disAMode()
2708 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d, in disAMode()
2709 nameIRegRexB(8,pfx,base_r), in disAMode()
2710 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2715 handleAddrOverrides(vbi, pfx, in disAMode()
2718 getIRegRexB(8,pfx,base_r), in disAMode()
2720 getIReg64rexX(pfx,index_r), mkU8(scale))), in disAMode()
2742 if (index_r == R_RSP && 0==getRexX(pfx)) { in disAMode()
2743 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), in disAMode()
2744 d, nameIRegRexB(8,pfx,base_r)); in disAMode()
2747 handleAddrOverrides(vbi, pfx, in disAMode()
2748 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) )); in disAMode()
2751 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d, in disAMode()
2752 nameIRegRexB(8,pfx,base_r), in disAMode()
2753 nameIReg64rexX(pfx,index_r)); in disAMode()
2755 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d, in disAMode()
2756 nameIRegRexB(8,pfx,base_r), in disAMode()
2757 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2762 handleAddrOverrides(vbi, pfx, in disAMode()
2765 getIRegRexB(8,pfx,base_r), in disAMode()
2767 getIReg64rexX(pfx,index_r), mkU8(scale))), in disAMode()
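
The disAMode matches above decode a ModRM/SIB addressing mode into a 64-bit effective address of the general shape base + (index << scale) + displacement, with %fs:/%gs: segment overrides and the 0x67 address-size override (haveASO) applied in handleAddrOverrides. The sketch below restates that arithmetic in plain C; it is a value-level model under those assumptions, not the IR-building code itself.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    /* Illustrative sketch of the amd64 effective-address formula that
       the disAMode matches above emit as IR:
          ea = base + (index << scale) + disp
       truncated to 32 bits when an address-size override (0x67) is
       present, and biased by a segment base for %fs:/%gs: overrides. */
    static uint64_t effAddr ( uint64_t base, uint64_t index, int scale,
                              int64_t disp, bool aso, uint64_t segBase )
    {
       uint64_t ea = base + (index << scale) + (uint64_t)disp;
       if (aso)
          ea &= 0xFFFFFFFFull;     /* 67h prefix: 32-bit addressing   */
       return segBase + ea;        /* segBase is 0 unless fs/gs used  */
    }

    int main(void) {
       /* e.g. 8(%rax,%rcx,4) with rax=0x1000, rcx=3 -> 0x1000 + 12 + 8 */
       printf("ea = 0x%llx\n",
              (unsigned long long)effAddr(0x1000, 3, 2, 8, false, 0));
       return 0;
    }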
2785 const VexAbiInfo* vbi, Prefix pfx, Long delta, in disAVSIBMode() argument
2809 *rI = index_r | (getRexX(pfx) << 3); in disAVSIBMode()
2822 DIS(buf, "%s%lld(,%s)", segRegTxt(pfx), d, vindex); in disAVSIBMode()
2824 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d, vindex, 1<<scale); in disAVSIBMode()
2829 DIS(buf, "%s(%s,%s)", segRegTxt(pfx), in disAVSIBMode()
2830 nameIRegRexB(8,pfx,base_r), vindex); in disAVSIBMode()
2832 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx), in disAVSIBMode()
2833 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale); in disAVSIBMode()
2846 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d, in disAVSIBMode()
2847 nameIRegRexB(8,pfx,base_r), vindex); in disAVSIBMode()
2849 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d, in disAVSIBMode()
2850 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale); in disAVSIBMode()
2856 return disAMode_copy2tmp( getIRegRexB(8,pfx,base_r) ); in disAVSIBMode()
2857 return disAMode_copy2tmp( binop(Iop_Add64, getIRegRexB(8,pfx,base_r), in disAVSIBMode()
2866 static UInt lengthAMode ( Prefix pfx, Long delta ) in lengthAMode() argument
2978 Prefix pfx, in dis_op2_E_G() argument
3028 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) { in dis_op2_E_G()
3029 putIRegG(size,pfx,rm, mkU(ty,0)); in dis_op2_E_G()
3032 assign( dst0, getIRegG(size,pfx,rm) ); in dis_op2_E_G()
3033 assign( src, getIRegE(size,pfx,rm) ); in dis_op2_E_G()
3038 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3043 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3047 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3051 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3059 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3063 nameIRegE(size,pfx,rm), in dis_op2_E_G()
3064 nameIRegG(size,pfx,rm)); in dis_op2_E_G()
3068 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_op2_E_G()
3069 assign( dst0, getIRegG(size,pfx,rm) ); in dis_op2_E_G()
3075 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3080 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3096 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
3100 dis_buf, nameIRegG(size, pfx, rm)); in dis_op2_E_G()
3128 Prefix pfx, in dis_op2_G_E() argument
3172 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) { in dis_op2_G_E()
3173 putIRegE(size,pfx,rm, mkU(ty,0)); in dis_op2_G_E()
3176 assign(dst0, getIRegE(size,pfx,rm)); in dis_op2_G_E()
3177 assign(src, getIRegG(size,pfx,rm)); in dis_op2_G_E()
3182 putIRegE(size, pfx, rm, mkexpr(dst1)); in dis_op2_G_E()
3187 putIRegE(size, pfx, rm, mkexpr(dst1)); in dis_op2_G_E()
3195 putIRegE(size, pfx, rm, mkexpr(dst1)); in dis_op2_G_E()
3199 nameIRegG(size,pfx,rm), in dis_op2_G_E()
3200 nameIRegE(size,pfx,rm)); in dis_op2_G_E()
3206 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_op2_G_E()
3208 assign(src, getIRegG(size,pfx,rm)); in dis_op2_G_E()
3211 if (haveLOCK(pfx)) { in dis_op2_G_E()
3222 if (haveLOCK(pfx)) { in dis_op2_G_E()
3234 if (haveLOCK(pfx)) { in dis_op2_G_E()
3251 nameIRegG(size,pfx,rm), dis_buf); in dis_op2_G_E()
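
dis_op2_E_G and dis_op2_G_E above handle the two directions of the usual G-versus-E operand encoding: G is the register named by ModRM.reg, E is the register or memory operand named by ModRM.mod/rm, and the suffix tells you where the result lands (putIRegG in the first, putIRegE in the second). A toy register-only sketch of that dispatch, using addition as a stand-in operation, is shown below.

    #include <stdint.h>
    #include <stdio.h>

    /* Toy register file, used only to illustrate the E->G versus G->E
       result direction of the dis_op2_E_G / dis_op2_G_E matches above
       (register forms only; the memory forms of E are not modelled). */
    static uint64_t regs[16];

    /* "op E,G" form: destination is the G (ModRM.reg) register: G = G op E */
    static void op2_E_G (uint8_t modrm, int rexR, int rexB) {
       unsigned g = ((modrm >> 3) & 7) | ((unsigned)rexR << 3);
       unsigned e = (modrm & 7)        | ((unsigned)rexB << 3);
       regs[g] = regs[g] + regs[e];
    }

    /* "op G,E" form: destination is the E operand: E = E op G */
    static void op2_G_E (uint8_t modrm, int rexR, int rexB) {
       unsigned g = ((modrm >> 3) & 7) | ((unsigned)rexR << 3);
       unsigned e = (modrm & 7)        | ((unsigned)rexB << 3);
       regs[e] = regs[e] + regs[g];
    }

    int main(void) {
       regs[0] = 5; regs[1] = 7;          /* rax = 5, rcx = 7           */
       op2_E_G(0xC1, 0, 0);               /* add %rcx,%rax -> rax = 12  */
       op2_G_E(0xC1, 0, 0);               /* add %rax,%rcx -> rcx = 19  */
       printf("rax=%llu rcx=%llu\n",
              (unsigned long long)regs[0], (unsigned long long)regs[1]);
       return 0;
    }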
3275 Prefix pfx, in dis_mov_E_G() argument
3284 putIRegG(size, pfx, rm, getIRegE(size, pfx, rm)); in dis_mov_E_G()
3286 nameIRegE(size,pfx,rm), in dis_mov_E_G()
3287 nameIRegG(size,pfx,rm)); in dis_mov_E_G()
3293 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_mov_E_G()
3294 putIRegG(size, pfx, rm, loadLE(szToITy(size), mkexpr(addr))); in dis_mov_E_G()
3297 nameIRegG(size,pfx,rm)); in dis_mov_E_G()
3322 Prefix pfx, in dis_mov_G_E() argument
3334 if (haveF2orF3(pfx)) { *ok = False; return delta0; } in dis_mov_G_E()
3335 putIRegE(size, pfx, rm, getIRegG(size, pfx, rm)); in dis_mov_G_E()
3337 nameIRegG(size,pfx,rm), in dis_mov_G_E()
3338 nameIRegE(size,pfx,rm)); in dis_mov_G_E()
3344 if (haveF2(pfx)) { *ok = False; return delta0; } in dis_mov_G_E()
3346 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_mov_G_E()
3347 storeLE( mkexpr(addr), getIRegG(size, pfx, rm) ); in dis_mov_G_E()
3349 nameIRegG(size,pfx,rm), in dis_mov_G_E()
3409 Prefix pfx, in dis_movx_E_G() argument
3414 putIRegG(szd, pfx, rm, in dis_movx_E_G()
3417 getIRegE(szs,pfx,rm))); in dis_movx_E_G()
3421 nameIRegE(szs,pfx,rm), in dis_movx_E_G()
3422 nameIRegG(szd,pfx,rm)); in dis_movx_E_G()
3430 IRTemp addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_movx_E_G()
3431 putIRegG(szd, pfx, rm, in dis_movx_E_G()
3439 nameIRegG(szd,pfx,rm)); in dis_movx_E_G()
3513 Prefix pfx, in dis_Grp1() argument
3540 assign(dst0, getIRegE(sz,pfx,modrm)); in dis_Grp1()
3559 putIRegE(sz, pfx, modrm, mkexpr(dst1)); in dis_Grp1()
3564 nameIRegE(sz,pfx,modrm)); in dis_Grp1()
3566 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz ); in dis_Grp1()
3572 if (haveLOCK(pfx)) { in dis_Grp1()
3583 if (haveLOCK(pfx)) { in dis_Grp1()
3595 if (haveLOCK(pfx)) { in dis_Grp1()
3623 Prefix pfx, in dis_Grp2() argument
3643 assign(dst0, getIRegE(sz, pfx, modrm)); in dis_Grp2()
3646 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz ); in dis_Grp2()
3868 putIRegE(sz, pfx, modrm, mkexpr(dst1)); in dis_Grp2()
3876 vex_printf(", %s\n", nameIRegE(sz,pfx,modrm)); in dis_Grp2()
3897 Prefix pfx, in dis_Grp8_Imm() argument
3918 if (haveF2orF3(pfx)) { in dis_Grp8_Imm()
3925 if (haveF2orF3(pfx)) { in dis_Grp8_Imm()
3926 if (haveF2andF3(pfx) || !haveLOCK(pfx)) { in dis_Grp8_Imm()
3960 assign( t2, widenUto64(getIRegE(sz, pfx, modrm)) ); in dis_Grp8_Imm()
3964 src_val, nameIRegE(sz,pfx,modrm)); in dis_Grp8_Imm()
3967 t_addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 1 ); in dis_Grp8_Imm()
3996 putIRegE(sz, pfx, modrm, narrowTo(ty, mkexpr(t2m))); in dis_Grp8_Imm()
3998 if (haveLOCK(pfx)) { in dis_Grp8_Imm()
4107 Prefix pfx, Int sz, Long delta, Bool* decode_OK ) in dis_Grp3() argument
4121 if (haveF2orF3(pfx)) goto unhandled; in dis_Grp3()
4129 getIRegE(sz,pfx,modrm), in dis_Grp3()
4134 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4142 putIRegE(sz, pfx, modrm, in dis_Grp3()
4144 getIRegE(sz, pfx, modrm))); in dis_Grp3()
4146 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4154 assign(src, getIRegE(sz, pfx, modrm)); in dis_Grp3()
4158 putIRegE(sz, pfx, modrm, mkexpr(dst1)); in dis_Grp3()
4159 DIP("neg%c %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4164 assign(src, getIRegE(sz,pfx,modrm)); in dis_Grp3()
4166 nameIRegE(sz,pfx,modrm) ); in dis_Grp3()
4171 assign(src, getIRegE(sz,pfx,modrm)); in dis_Grp3()
4173 nameIRegE(sz,pfx,modrm) ); in dis_Grp3()
4177 assign( t1, getIRegE(sz, pfx, modrm) ); in dis_Grp3()
4180 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4184 assign( t1, getIRegE(sz, pfx, modrm) ); in dis_Grp3()
4187 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4195 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_Grp3()
4197 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_Grp3()
4202 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, in dis_Grp3()
4230 if (haveLOCK(pfx)) { in dis_Grp3()
4246 if (haveLOCK(pfx)) { in dis_Grp3()
4285 Prefix pfx, Long delta, Bool* decode_OK ) in dis_Grp4() argument
4299 if (haveF2orF3(pfx)) goto unhandled; in dis_Grp4()
4300 assign(t1, getIRegE(1, pfx, modrm)); in dis_Grp4()
4304 putIRegE(1, pfx, modrm, mkexpr(t2)); in dis_Grp4()
4309 putIRegE(1, pfx, modrm, mkexpr(t2)); in dis_Grp4()
4318 nameIRegE(1, pfx, modrm)); in dis_Grp4()
4321 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_Grp4()
4323 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_Grp4()
4328 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_Grp4()
4333 if (haveLOCK(pfx)) { in dis_Grp4()
4343 if (haveLOCK(pfx)) { in dis_Grp4()
4369 Prefix pfx, Int sz, Long delta, in dis_Grp5() argument
4388 if (haveF2orF3(pfx) in dis_Grp5()
4389 && ! (haveF2(pfx) in dis_Grp5()
4392 assign(t1, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4399 putIRegE(sz,pfx,modrm, mkexpr(t2)); in dis_Grp5()
4406 putIRegE(sz,pfx,modrm, mkexpr(t2)); in dis_Grp5()
4411 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
4414 assign(t3, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4427 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
4430 assign(t3, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4441 assign(t3, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4458 nameIRegE(sz, pfx, modrm)); in dis_Grp5()
4461 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_Grp5()
4463 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_Grp5()
4466 && (haveF2(pfx) && !haveF3(pfx))) { in dis_Grp5()
4471 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_Grp5()
4481 if (haveLOCK(pfx)) { in dis_Grp5()
4493 if (haveLOCK(pfx)) { in dis_Grp5()
4504 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
4520 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
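
Several of the groups above (dis_Grp3, dis_Grp4, dis_Grp5, and later dis_bt_G_E and dis_cmpxchg_G_E) share the same prefix-validity idiom: F2/F3 are rejected by default, but exactly one of them is tolerated again when LOCK is also present, which is how the XACQUIRE/XRELEASE elision hints are encoded; dis_Grp5 additionally prints a lone F2 on indirect call/jmp as the MPX "bnd" hint, as visible in its matches. The function below restates the acceptance test as plain booleans; it is illustrative and not the decoder's own helper.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative restatement of the prefix acceptance idiom seen in
       the dis_Grp3/dis_Grp4/dis_Grp5 matches above: F2/F3 are invalid
       by default, but one (not both) of them is accepted when LOCK is
       also present, i.e. as an XACQUIRE/XRELEASE hint on a locked op. */
    static bool prefixesAcceptable (bool haveF2, bool haveF3, bool haveLOCK)
    {
       bool haveF2orF3  = haveF2 || haveF3;
       bool haveF2andF3 = haveF2 && haveF3;

       if (!haveF2orF3)
          return true;                       /* plain encoding */
       /* exactly one of F2/F3, and only together with LOCK */
       return !haveF2andF3 && haveLOCK;
    }

    int main(void) {
       printf("%d %d %d\n",
              prefixesAcceptable(false, false, false),   /* 1: no hints        */
              prefixesAcceptable(true,  false, true),    /* 1: xacquire + lock */
              prefixesAcceptable(true,  false, false));  /* 0: stray F2        */
       return 0;
    }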
4580 void dis_string_op( void (*dis_OP)( Int, IRTemp, Prefix pfx ), in dis_string_op() argument
4581 Int sz, const HChar* name, Prefix pfx ) in dis_string_op()
4586 vassert(pfx == clearSegBits(pfx)); in dis_string_op()
4588 dis_OP( sz, t_inc, pfx ); in dis_string_op()
4593 void dis_MOVS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_MOVS() argument
4600 if (haveASO(pfx)) { in dis_MOVS()
4612 if (haveASO(pfx)) { in dis_MOVS()
4621 void dis_LODS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_LODS() argument
4627 if (haveASO(pfx)) in dis_LODS()
4635 if (haveASO(pfx)) in dis_LODS()
4641 void dis_STOS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_STOS() argument
4650 if (haveASO(pfx)) in dis_STOS()
4658 if (haveASO(pfx)) in dis_STOS()
4664 void dis_CMPS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_CMPS() argument
4673 if (haveASO(pfx)) { in dis_CMPS()
4689 if (haveASO(pfx)) { in dis_CMPS()
4698 void dis_SCAS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_SCAS() argument
4708 if (haveASO(pfx)) in dis_SCAS()
4718 if (haveASO(pfx)) in dis_SCAS()
4732 Prefix pfx ) in dis_REP_op() argument
4740 vassert(pfx == clearSegBits(pfx)); in dis_REP_op()
4742 if (haveASO(pfx)) { in dis_REP_op()
4755 if (haveASO(pfx)) in dis_REP_op()
4761 dis_OP (sz, t_inc, pfx); in dis_REP_op()
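
The string-op matches above (dis_MOVS through dis_REP_op) all branch on haveASO(pfx): with a 0x67 address-size override the count and pointer registers behave as 32-bit %ecx/%esi/%edi instead of their 64-bit forms. The byte-sized REP MOVS sketch below models that with a 32-bit mask; it is a plain-C approximation of the guest-visible behaviour, not the IR the decoder generates.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>
    #include <stdbool.h>

    /* Sketch of rep movsb semantics as modelled by the dis_MOVS /
       dis_REP_op matches above: rcx counts iterations, rsi/rdi step by
       +1 or -1 depending on DF, and an address-size override (ASO)
       makes the count and pointers behave as 32-bit quantities. */
    static void rep_movsb ( uint8_t* guestMem,
                            uint64_t* rcx, uint64_t* rsi, uint64_t* rdi,
                            int df, bool aso )
    {
       uint64_t mask = aso ? 0xFFFFFFFFull : ~0ull;
       while ((*rcx & mask) != 0) {
          guestMem[*rdi & mask] = guestMem[*rsi & mask];
          int64_t inc = df ? -1 : +1;
          *rsi = (*rsi + inc) & mask;
          *rdi = (*rdi + inc) & mask;
          *rcx = (*rcx - 1)   & mask;
       }
    }

    int main(void) {
       uint8_t mem[32];
       memset(mem, 0, sizeof mem);
       memcpy(mem, "hello", 5);
       uint64_t rcx = 5, rsi = 0, rdi = 16;
       rep_movsb(mem, &rcx, &rsi, &rdi, /*DF=*/0, /*ASO=*/false);
       printf("%s\n", (char*)mem + 16);   /* prints "hello" */
       return 0;
    }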
4785 Prefix pfx, in dis_mul_E_G() argument
4797 assign( tg, getIRegG(size, pfx, rm) ); in dis_mul_E_G()
4799 assign( te, getIRegE(size, pfx, rm) ); in dis_mul_E_G()
4801 IRTemp addr = disAMode( &alen, vbi, pfx, delta0, dis_buf, 0 ); in dis_mul_E_G()
4809 putIRegG(size, pfx, rm, mkexpr(resLo) ); in dis_mul_E_G()
4813 nameIRegE(size,pfx,rm), in dis_mul_E_G()
4814 nameIRegG(size,pfx,rm)); in dis_mul_E_G()
4819 nameIRegG(size,pfx,rm)); in dis_mul_E_G()
4828 Prefix pfx, in dis_imul_I_E_G() argument
4845 assign(te, getIRegE(size, pfx, rm)); in dis_imul_I_E_G()
4848 IRTemp addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_imul_I_E_G()
4863 putIRegG(size, pfx, rm, mkexpr(resLo)); in dis_imul_I_E_G()
4867 ( epartIsReg(rm) ? nameIRegE(size,pfx,rm) : dis_buf ), in dis_imul_I_E_G()
4868 nameIRegG(size,pfx,rm) ); in dis_imul_I_E_G()
5500 const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_FPU() argument
5519 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
5663 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6193 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6322 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6501 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6612 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6645 if ( have66(pfx) ) { in dis_FPU()
6716 if ( have66(pfx) ) { in dis_FPU()
6726 if ( have66(pfx) ) { in dis_FPU()
6781 if ( have66(pfx) ) { in dis_FPU()
6876 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6997 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
7175 Prefix pfx, in dis_MMXop_regmem_to_reg() argument
7281 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMXop_regmem_to_reg()
7325 Prefix pfx, Long delta, in dis_MMX_shiftG_byE() argument
7345 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX_shiftG_byE()
7457 const VexAbiInfo* vbi, Prefix pfx, Int sz, Long delta ) in dis_MMX() argument
7480 getIReg32(eregOfRexRM(pfx,modrm)) ) ); in dis_MMX()
7482 nameIReg32(eregOfRexRM(pfx,modrm)), in dis_MMX()
7485 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7502 getIReg64(eregOfRexRM(pfx,modrm)) ); in dis_MMX()
7504 nameIReg64(eregOfRexRM(pfx,modrm)), in dis_MMX()
7507 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7525 putIReg32( eregOfRexRM(pfx,modrm), in dis_MMX()
7529 nameIReg32(eregOfRexRM(pfx,modrm))); in dis_MMX()
7531 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7544 putIReg64( eregOfRexRM(pfx,modrm), in dis_MMX()
7548 nameIReg64(eregOfRexRM(pfx,modrm))); in dis_MMX()
7550 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7564 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7574 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7585 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7595 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7608 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padd", True ); in dis_MMX()
7614 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7616 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padds", True ); in dis_MMX()
7623 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "paddus", True ); in dis_MMX()
7631 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psub", True ); in dis_MMX()
7638 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubs", True ); in dis_MMX()
7645 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubus", True ); in dis_MMX()
7651 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmulhw", False ); in dis_MMX()
7657 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmullw", False ); in dis_MMX()
7662 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmaddwd", False ); in dis_MMX()
7670 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpeq", True ); in dis_MMX()
7678 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpgt", True ); in dis_MMX()
7684 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packssdw", False ); in dis_MMX()
7690 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packsswb", False ); in dis_MMX()
7696 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packuswb", False ); in dis_MMX()
7703 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7705 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckh", True ); in dis_MMX()
7712 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7714 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckl", True ); in dis_MMX()
7720 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pand", False ); in dis_MMX()
7726 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pandn", False ); in dis_MMX()
7732 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "por", False ); in dis_MMX()
7738 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pxor", False ); in dis_MMX()
7742 delta = dis_MMX_shiftG_byE(vbi, pfx, delta, _name, _op); \ in dis_MMX()
7813 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) )); in dis_MMX()
7895 Prefix pfx, in dis_SHLRD_Gv_Ev() argument
7937 assign( gsrc, getIRegG(sz, pfx, modrm) ); in dis_SHLRD_Gv_Ev()
7941 assign( esrc, getIRegE(sz, pfx, modrm) ); in dis_SHLRD_Gv_Ev()
7945 nameIRegG(sz, pfx, modrm), nameIRegE(sz, pfx, modrm)); in dis_SHLRD_Gv_Ev()
7947 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, in dis_SHLRD_Gv_Ev()
7955 nameIRegG(sz, pfx, modrm), dis_buf); in dis_SHLRD_Gv_Ev()
8051 putIRegE(sz, pfx, modrm, mkexpr(resTy)); in dis_SHLRD_Gv_Ev()
8080 Prefix pfx, Int sz, Long delta, BtOp op, in dis_bt_G_E() argument
8106 if (haveF2orF3(pfx)) { in dis_bt_G_E()
8113 if (haveF2orF3(pfx)) { in dis_bt_G_E()
8114 if (haveF2andF3(pfx) || !haveLOCK(pfx) || op == BtOpNone) { in dis_bt_G_E()
8121 assign( t_bitno0, widenSto64(getIRegG(sz, pfx, modrm)) ); in dis_bt_G_E()
8146 storeLE( mkexpr(t_rsp), getIRegE(sz, pfx, modrm) ); in dis_bt_G_E()
8158 t_addr0 = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_bt_G_E()
8209 if ((haveLOCK(pfx)) && !epartIsReg(modrm)) { in dis_bt_G_E()
8254 putIRegE(sz, pfx, modrm, loadLE(szToITy(sz), mkexpr(t_rsp)) ); in dis_bt_G_E()
8259 nameBtOp(op), nameISize(sz), nameIRegG(sz, pfx, modrm), in dis_bt_G_E()
8260 ( epartIsReg(modrm) ? nameIRegE(sz, pfx, modrm) : dis_buf ) ); in dis_bt_G_E()
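
dis_bt_G_E above implements BT/BTS/BTR/BTC. For the memory form, the bit index taken from the G register is split into a byte displacement and a bit-within-byte; the selected bit becomes CF and is optionally set, cleared, or complemented. The sketch below shows that split at the value level, reusing the BtOp names from the matches; it is an assumption-based model of the guest semantics, not the decoder's code.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    typedef enum { BtOpNone, BtOpSet, BtOpReset, BtOpComp } BtOp;

    /* Sketch of the memory form of bt/bts/btr/btc as handled by the
       dis_bt_G_E matches above: split the signed bit index into a byte
       displacement and a bit number, return the old bit (-> CF), and
       apply the requested update. */
    static bool bit_test_mem ( uint8_t* base, int64_t bitno, BtOp op )
    {
       int64_t byteOff = bitno >> 3;         /* arithmetic shift: may be < 0 */
       uint8_t bit     = (uint8_t)(bitno & 7);
       uint8_t mask    = (uint8_t)(1u << bit);
       uint8_t old     = base[byteOff];
       bool    cf      = (old & mask) != 0;

       switch (op) {
          case BtOpSet:   base[byteOff] = old |  mask; break;
          case BtOpReset: base[byteOff] = old & ~mask; break;
          case BtOpComp:  base[byteOff] = old ^  mask; break;
          case BtOpNone:  default:                     break;
       }
       return cf;
    }

    int main(void) {
       uint8_t buf[4] = { 0, 0, 0, 0 };
       bool cf0 = bit_test_mem(buf, 13, BtOpSet);   /* sets bit 5 of byte 1 */
       bool cf1 = bit_test_mem(buf, 13, BtOpNone);
       printf("cf0=%d cf1=%d byte1=0x%02x\n", cf0, cf1, buf[1]);
       return 0;
    }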
8270 Prefix pfx, Int sz, Long delta, Bool fwds ) in dis_bs_E_G() argument
8289 assign( src, getIRegE(sz, pfx, modrm) ); in dis_bs_E_G()
8292 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_bs_E_G()
8299 ( isReg ? nameIRegE(sz, pfx, modrm) : dis_buf ), in dis_bs_E_G()
8300 nameIRegG(sz, pfx, modrm)); in dis_bs_E_G()
8366 widenUto64( getIRegG( sz, pfx, modrm ) ) in dis_bs_E_G()
8379 putIRegG( sz, pfx, modrm, mkexpr(dst) ); in dis_bs_E_G()
8387 void codegen_xchg_rAX_Reg ( Prefix pfx, Int sz, UInt regLo3 ) in codegen_xchg_rAX_Reg() argument
8396 assign( t2, getIRegRexB(8, pfx, regLo3) ); in codegen_xchg_rAX_Reg()
8398 putIRegRexB(8, pfx, regLo3, mkexpr(t1) ); in codegen_xchg_rAX_Reg()
8401 assign( t2, getIRegRexB(4, pfx, regLo3) ); in codegen_xchg_rAX_Reg()
8403 putIRegRexB(4, pfx, regLo3, mkexpr(t1) ); in codegen_xchg_rAX_Reg()
8406 assign( t2, getIRegRexB(2, pfx, regLo3) ); in codegen_xchg_rAX_Reg()
8408 putIRegRexB(2, pfx, regLo3, mkexpr(t1) ); in codegen_xchg_rAX_Reg()
8412 nameIRegRexB(sz,pfx, regLo3)); in codegen_xchg_rAX_Reg()
8471 Prefix pfx, in dis_cmpxchg_G_E() argument
8503 if (haveF2orF3(pfx)) { in dis_cmpxchg_G_E()
8508 if (haveF2orF3(pfx)) { in dis_cmpxchg_G_E()
8509 if (haveF2andF3(pfx) || !haveLOCK(pfx)) { in dis_cmpxchg_G_E()
8518 assign( dest, getIRegE(size, pfx, rm) ); in dis_cmpxchg_G_E()
8520 assign( src, getIRegG(size, pfx, rm) ); in dis_cmpxchg_G_E()
8527 putIRegE(size, pfx, rm, mkexpr(dest2)); in dis_cmpxchg_G_E()
8529 nameIRegG(size,pfx,rm), in dis_cmpxchg_G_E()
8530 nameIRegE(size,pfx,rm) ); in dis_cmpxchg_G_E()
8532 else if (!epartIsReg(rm) && !haveLOCK(pfx)) { in dis_cmpxchg_G_E()
8534 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_cmpxchg_G_E()
8537 assign( src, getIRegG(size, pfx, rm) ); in dis_cmpxchg_G_E()
8546 nameIRegG(size,pfx,rm), dis_buf); in dis_cmpxchg_G_E()
8548 else if (!epartIsReg(rm) && haveLOCK(pfx)) { in dis_cmpxchg_G_E()
8554 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_cmpxchg_G_E()
8556 assign( src, getIRegG(size, pfx, rm) ); in dis_cmpxchg_G_E()
8567 nameIRegG(size,pfx,rm), dis_buf); in dis_cmpxchg_G_E()
8595 Prefix pfx, in dis_cmov_E_G() argument
8609 assign( tmps, getIRegE(sz, pfx, rm) ); in dis_cmov_E_G()
8610 assign( tmpd, getIRegG(sz, pfx, rm) ); in dis_cmov_E_G()
8612 putIRegG( sz, pfx, rm, in dis_cmov_E_G()
8618 nameIRegE(sz,pfx,rm), in dis_cmov_E_G()
8619 nameIRegG(sz,pfx,rm)); in dis_cmov_E_G()
8625 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_cmov_E_G()
8627 assign( tmpd, getIRegG(sz, pfx, rm) ); in dis_cmov_E_G()
8629 putIRegG( sz, pfx, rm, in dis_cmov_E_G()
8637 nameIRegG(sz,pfx,rm)); in dis_cmov_E_G()
8646 Prefix pfx, Int sz, Long delta0 ) in dis_xadd_G_E() argument
8670 assign( tmpd, getIRegE(sz, pfx, rm) ); in dis_xadd_G_E()
8671 assign( tmpt0, getIRegG(sz, pfx, rm) ); in dis_xadd_G_E()
8675 putIRegG(sz, pfx, rm, mkexpr(tmpd)); in dis_xadd_G_E()
8676 putIRegE(sz, pfx, rm, mkexpr(tmpt1)); in dis_xadd_G_E()
8678 nameISize(sz), nameIRegG(sz,pfx,rm), nameIRegE(sz,pfx,rm)); in dis_xadd_G_E()
8682 else if (!epartIsReg(rm) && !haveLOCK(pfx)) { in dis_xadd_G_E()
8684 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_xadd_G_E()
8686 assign( tmpt0, getIRegG(sz, pfx, rm) ); in dis_xadd_G_E()
8691 putIRegG(sz, pfx, rm, mkexpr(tmpd)); in dis_xadd_G_E()
8693 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf); in dis_xadd_G_E()
8697 else if (!epartIsReg(rm) && haveLOCK(pfx)) { in dis_xadd_G_E()
8699 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_xadd_G_E()
8701 assign( tmpt0, getIRegG(sz, pfx, rm) ); in dis_xadd_G_E()
8707 putIRegG(sz, pfx, rm, mkexpr(tmpd)); in dis_xadd_G_E()
8709 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf); in dis_xadd_G_E()
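
dis_cmpxchg_G_E and dis_xadd_G_E above each split into a register case, an unlocked memory case, and a LOCK-prefixed memory case (haveLOCK). Their guest-visible value flow can be sketched in C as below; the atomicity that the LOCKed forms require is deliberately not modelled here.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    /* Value-flow sketch of cmpxchg and xadd on a 64-bit destination, as
       implemented by the dis_cmpxchg_G_E / dis_xadd_G_E matches above.
       The LOCKed memory forms additionally need the read-modify-write
       to be atomic, which this plain-C sketch does not attempt. */

    /* cmpxchg dst, src:  if (rax == dst) { ZF=1; dst = src; }
                          else            { ZF=0; rax = dst; }        */
    static bool cmpxchg64 ( uint64_t* dst, uint64_t src, uint64_t* rax )
    {
       uint64_t old = *dst;
       if (*rax == old) { *dst = src; return true;  }   /* ZF = 1 */
       else             { *rax = old; return false; }   /* ZF = 0 */
    }

    /* xadd dst, src:  tmp = dst + src; src = dst; dst = tmp;          */
    static void xadd64 ( uint64_t* dst, uint64_t* src )
    {
       uint64_t old = *dst;
       *dst = old + *src;
       *src = old;
    }

    int main(void) {
       uint64_t mem = 10, rax = 10, rbx = 99;
       bool zf = cmpxchg64(&mem, rbx, &rax);    /* succeeds: mem = 99 */
       uint64_t r = 7;
       xadd64(&mem, &r);                        /* mem = 106, r = 99  */
       printf("zf=%d mem=%llu r=%llu\n", zf,
              (unsigned long long)mem, (unsigned long long)r);
       return 0;
    }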
8795 Prefix pfx, in dis_mov_S_E() argument
8804 putIRegE(size, pfx, rm, mkU(szToITy(size), 0)); in dis_mov_S_E()
8805 DIP("mov %s,%s\n", nameSReg(gregOfRexRM(pfx, rm)), in dis_mov_S_E()
8806 nameIRegE(size, pfx, rm)); in dis_mov_S_E()
8812 IRTemp addr = disAMode(&len, vbi, pfx, delta0, dis_buf, 0); in dis_mov_S_E()
8814 DIP("mov %s,%s\n", nameSReg(gregOfRexRM(pfx, rm)), in dis_mov_S_E()
8902 Prefix pfx, Long delta, in dis_SSE_E_to_G_all_wrk() argument
8913 = invertG ? unop(Iop_NotV128, getXMMReg(gregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8914 : getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_all_wrk()
8917 gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_all_wrk()
8921 getXMMReg(eregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8923 getXMMReg(eregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8926 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_all_wrk()
8927 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_all_wrk()
8930 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_all_wrk()
8932 gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_all_wrk()
8942 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_all_wrk()
8952 Prefix pfx, Long delta, in dis_SSE_E_to_G_all() argument
8955 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, False ); in dis_SSE_E_to_G_all()
8962 Prefix pfx, Long delta, in dis_SSE_E_to_G_all_invG() argument
8965 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, True ); in dis_SSE_E_to_G_all_invG()
8972 Prefix pfx, Long delta, in dis_SSE_E_to_G_lo32() argument
8979 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_lo32()
8981 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo32()
8983 getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_E_to_G_lo32()
8985 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_lo32()
8986 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo32()
8992 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_lo32()
8995 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo32()
8999 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo32()
9008 Prefix pfx, Long delta, in dis_SSE_E_to_G_lo64() argument
9015 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_lo64()
9017 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo64()
9019 getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_E_to_G_lo64()
9021 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_lo64()
9022 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo64()
9028 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_lo64()
9031 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo64()
9035 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo64()
9045 Prefix pfx, Long delta, in dis_SSE_E_to_G_unary_all() argument
9057 IRExpr* src = getXMMReg(eregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_unary_all()
9061 putXMMReg( gregOfRexRM(pfx,rm), res ); in dis_SSE_E_to_G_unary_all()
9063 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_unary_all()
9064 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_all()
9067 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_unary_all()
9072 putXMMReg( gregOfRexRM(pfx,rm), res ); in dis_SSE_E_to_G_unary_all()
9075 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_all()
9085 Prefix pfx, Long delta, in dis_SSE_E_to_G_unary_lo32() argument
9098 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo32()
9104 getXMMRegLane32(eregOfRexRM(pfx,rm), 0)) ); in dis_SSE_E_to_G_unary_lo32()
9105 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
9107 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_unary_lo32()
9108 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo32()
9111 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_unary_lo32()
9116 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
9119 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo32()
9129 Prefix pfx, Long delta, in dis_SSE_E_to_G_unary_lo64() argument
9142 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo64()
9148 getXMMRegLane64(eregOfRexRM(pfx,rm), 0)) ); in dis_SSE_E_to_G_unary_lo64()
9149 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
9151 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_unary_lo64()
9152 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo64()
9155 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_unary_lo64()
9160 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
9163 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo64()
9175 Prefix pfx, Long delta, in dis_SSEint_E_to_G() argument
9184 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSEint_E_to_G()
9187 epart = getXMMReg(eregOfRexRM(pfx,rm)); in dis_SSEint_E_to_G()
9189 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSEint_E_to_G()
9190 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSEint_E_to_G()
9193 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSEint_E_to_G()
9197 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSEint_E_to_G()
9200 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSEint_E_to_G()
9343 Prefix pfx, Long delta, in dis_SSE_cmp_E_to_G() argument
9364 assign( plain, binop(op, getXMMReg(gregOfRexRM(pfx,rm)), in dis_SSE_cmp_E_to_G()
9365 getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_cmp_E_to_G()
9369 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_cmp_E_to_G()
9370 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_cmp_E_to_G()
9372 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_SSE_cmp_E_to_G()
9381 getXMMReg(gregOfRexRM(pfx,rm)), in dis_SSE_cmp_E_to_G()
9394 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_cmp_E_to_G()
9398 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_cmp_E_to_G()
9404 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_cmp_E_to_G()
9408 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(plain) ); in dis_SSE_cmp_E_to_G()
9419 Prefix pfx, Long delta, in dis_SSE_shiftG_byE() argument
9432 assign( amt, getXMMRegLane64(eregOfRexRM(pfx,rm), 0) ); in dis_SSE_shiftG_byE()
9434 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_shiftG_byE()
9435 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_shiftG_byE()
9438 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_shiftG_byE()
9442 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_shiftG_byE()
9445 assign( g0, getXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_shiftG_byE()
9485 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(g1) ); in dis_SSE_shiftG_byE()
9493 ULong dis_SSE_shiftE_imm ( Prefix pfx, in dis_SSE_shiftE_imm() argument
9508 nameXMMReg(eregOfRexRM(pfx,rm)) ); in dis_SSE_shiftE_imm()
9509 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) ); in dis_SSE_shiftE_imm()
9540 putXMMReg( eregOfRexRM(pfx,rm), mkexpr(e1) ); in dis_SSE_shiftE_imm()
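
The dis_SSE_E_to_G_* matches above share one shape: read the G xmm register, combine it with E (an xmm register or 128-bit memory operand), and write the result back to G, either across all lanes (..._all) or only in the low 32-/64-bit lane with the upper lanes of G preserved (..._lo32/..._lo64). A lane-level sketch on a four-float vector, with addition as a stand-in operation, follows.

    #include <stdio.h>

    /* Lane-level sketch of the E-to-G SSE helpers matched above, using
       a 4 x float vector and addition as the stand-in operation. */
    typedef struct { float lane[4]; } XMM;

    /* ..._all: apply the op to every lane, result goes back to G. */
    static void sse_E_to_G_all ( XMM* g, const XMM* e )
    {
       for (int i = 0; i < 4; i++)
          g->lane[i] = g->lane[i] + e->lane[i];
    }

    /* ..._lo32: apply the op only to lane 0; lanes 1..3 of G unchanged. */
    static void sse_E_to_G_lo32 ( XMM* g, const XMM* e )
    {
       g->lane[0] = g->lane[0] + e->lane[0];
    }

    int main(void) {
       XMM g = {{ 1, 2, 3, 4 }};
       XMM e = {{ 10, 20, 30, 40 }};
       sse_E_to_G_lo32(&g, &e);     /* g = { 11, 2, 3, 4 }    */
       sse_E_to_G_all(&g, &e);      /* g = { 21, 22, 33, 44 } */
       printf("%g %g %g %g\n", g.lane[0], g.lane[1], g.lane[2], g.lane[3]);
       return 0;
    }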
10174 static Long dis_COMISD ( const VexAbiInfo* vbi, Prefix pfx, in dis_COMISD() argument
10185 assign( argR, getXMMRegLane64F( eregOfRexRM(pfx,modrm), in dis_COMISD()
10190 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_COMISD()
10191 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISD()
10193 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_COMISD()
10199 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISD()
10201 assign( argL, getXMMRegLane64F( gregOfRexRM(pfx,modrm), in dis_COMISD()
10217 static Long dis_COMISS ( const VexAbiInfo* vbi, Prefix pfx, in dis_COMISS() argument
10228 assign( argR, getXMMRegLane32F( eregOfRexRM(pfx,modrm), in dis_COMISS()
10233 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_COMISS()
10234 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISS()
10236 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_COMISS()
10242 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISS()
10244 assign( argL, getXMMRegLane32F( gregOfRexRM(pfx,modrm), in dis_COMISS()
10262 static Long dis_PSHUFD_32x4 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PSHUFD_32x4() argument
10273 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_PSHUFD_32x4()
10277 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_PSHUFD_32x4()
10278 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_PSHUFD_32x4()
10280 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_PSHUFD_32x4()
10287 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_PSHUFD_32x4()
10303 (gregOfRexRM(pfx,modrm), mkexpr(dV)); in dis_PSHUFD_32x4()
10308 static Long dis_PSHUFD_32x8 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_PSHUFD_32x8() argument
10316 UInt rG = gregOfRexRM(pfx,modrm); in dis_PSHUFD_32x8()
10318 UInt rE = eregOfRexRM(pfx,modrm); in dis_PSHUFD_32x8()
10324 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_PSHUFD_32x8()
10443 static Long dis_CVTxSD2SI ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxSD2SI() argument
10457 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0)); in dis_CVTxSD2SI()
10459 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_CVTxSD2SI()
10460 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10463 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxSD2SI()
10468 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10479 putIReg32( gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10483 putIReg64( gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10491 static Long dis_CVTxSS2SI ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxSS2SI() argument
10505 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0)); in dis_CVTxSS2SI()
10507 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_CVTxSS2SI()
10508 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10511 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxSS2SI()
10516 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10527 putIReg32( gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10533 putIReg64( gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10543 static Long dis_CVTPS2PD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPS2PD_128() argument
10552 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPS2PD_128()
10554 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPS2PD_128()
10561 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPS2PD_128()
10578 static Long dis_CVTPS2PD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPS2PD_256() argument
10589 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPS2PD_256()
10591 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPS2PD_256()
10599 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPS2PD_256()
10619 static Long dis_CVTPD2PS_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPD2PS_128() argument
10626 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPD2PS_128()
10630 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPD2PS_128()
10636 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPD2PS_128()
10664 static Long dis_CVTxPS2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPS2DQ_128() argument
10673 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_128()
10677 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_128()
10683 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPS2DQ_128()
10714 static Long dis_CVTxPS2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPS2DQ_256() argument
10723 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_256()
10727 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_256()
10733 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPS2DQ_256()
10766 static Long dis_CVTxPD2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPD2DQ_128() argument
10775 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_128()
10779 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_128()
10785 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPD2DQ_128()
10821 static Long dis_CVTxPD2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPD2DQ_256() argument
10830 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_256()
10834 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_256()
10840 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPD2DQ_256()
10875 static Long dis_CVTDQ2PS_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PS_128() argument
10884 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_128()
10888 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_128()
10894 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PS_128()
10923 static Long dis_CVTDQ2PS_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PS_256() argument
10932 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_256()
10936 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_256()
10941 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PS_256()
10976 static Long dis_PMOVMSKB_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVMSKB_128() argument
10981 UInt rE = eregOfRexRM(pfx,modrm); in dis_PMOVMSKB_128()
10982 UInt rG = gregOfRexRM(pfx,modrm); in dis_PMOVMSKB_128()
10995 static Long dis_PMOVMSKB_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVMSKB_256() argument
11000 UInt rE = eregOfRexRM(pfx,modrm); in dis_PMOVMSKB_256()
11001 UInt rG = gregOfRexRM(pfx,modrm); in dis_PMOVMSKB_256()
11450 static Long dis_PSHUFxW_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PSHUFxW_128() argument
11457 UInt rG = gregOfRexRM(pfx,modrm); in dis_PSHUFxW_128()
11467 UInt rE = eregOfRexRM(pfx,modrm); in dis_PSHUFxW_128()
11475 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PSHUFxW_128()
11505 static Long dis_PSHUFxW_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PSHUFxW_256() argument
11512 UInt rG = gregOfRexRM(pfx,modrm); in dis_PSHUFxW_256()
11521 UInt rE = eregOfRexRM(pfx,modrm); in dis_PSHUFxW_256()
11528 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PSHUFxW_256()
11552 static Long dis_PEXTRW_128_EregOnly_toG ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRW_128_EregOnly_toG() argument
11557 UInt rG = gregOfRexRM(pfx,modrm); in dis_PEXTRW_128_EregOnly_toG()
11563 UInt rE = eregOfRexRM(pfx,modrm); in dis_PEXTRW_128_EregOnly_toG()
11591 static Long dis_CVTDQ2PD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PD_128() argument
11599 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_128()
11602 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_128()
11607 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PD_128()
11626 static Long dis_STMXCSR ( const VexAbiInfo* vbi, Prefix pfx, in dis_STMXCSR() argument
11634 vassert(gregOfRexRM(pfx,modrm) == 3); /* ditto */ in dis_STMXCSR()
11636 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_STMXCSR()
11658 static Long dis_LDMXCSR ( const VexAbiInfo* vbi, Prefix pfx, in dis_LDMXCSR() argument
11666 vassert(gregOfRexRM(pfx,modrm) == 2); /* ditto */ in dis_LDMXCSR()
11671 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_LDMXCSR()
11843 Prefix pfx, Long delta, Int sz ) in dis_XSAVE() argument
11858 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_XSAVE()
11892 Prefix pfx, Long delta, Int sz ) in dis_FXSAVE() argument
11902 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_FXSAVE()
12102 Prefix pfx, Long delta, Int sz ) in dis_XRSTOR() argument
12113 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_XRSTOR()
12172 Prefix pfx, Long delta, Int sz ) in dis_FXRSTOR() argument
12183 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_FXRSTOR()
12265 static Long dis_MASKMOVDQU ( const VexAbiInfo* vbi, Prefix pfx, in dis_MASKMOVDQU() argument
12274 UInt rG = gregOfRexRM(pfx,modrm); in dis_MASKMOVDQU()
12275 UInt rE = eregOfRexRM(pfx,modrm); in dis_MASKMOVDQU()
12277 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) )); in dis_MASKMOVDQU()
12286 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 ), in dis_MASKMOVDQU()
12289 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ), in dis_MASKMOVDQU()
12308 static Long dis_MOVMSKPS_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVMSKPS_128() argument
12312 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPS_128()
12313 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPS_128()
12340 static Long dis_MOVMSKPS_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_MOVMSKPS_256() argument
12343 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPS_256()
12344 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPS_256()
12390 static Long dis_MOVMSKPD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVMSKPD_128() argument
12394 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPD_128()
12395 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPD_128()
12412 static Long dis_MOVMSKPD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_MOVMSKPD_256() argument
12415 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPD_256()
12416 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPD_256()
12448 Prefix pfx, Int sz, Long deltaIN, in dis_ESC_0F__SSE2() argument
12471 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12476 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12477 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12478 DIP("movupd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12479 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12482 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12483 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12486 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12494 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
12498 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12499 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
12500 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12501 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12504 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12505 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) ); in dis_ESC_0F__SSE2()
12506 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12509 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12516 if (haveF3no66noF2(pfx) in dis_ESC_0F__SSE2()
12520 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12521 getXMMRegLane32( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
12522 DIP("movss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12523 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12526 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12527 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) ); in dis_ESC_0F__SSE2()
12528 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12531 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12537 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12541 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12542 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12543 DIP("movups %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12544 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12547 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12548 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12551 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12561 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
12565 putXMMRegLane64( eregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12566 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
12567 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12568 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12571 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12573 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
12574 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12582 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12587 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12589 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
12590 DIP("movss %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12597 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12601 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12602 getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_ESC_0F__SSE2()
12603 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12604 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12607 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12608 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12609 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12616 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12622 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12623 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12624 DIP("movups %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12635 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12641 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12643 putXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12647 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12653 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12658 putXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12660 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 )); in dis_ESC_0F__SSE2()
12661 DIP("movhlps %s, %s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12662 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12666 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0/*lower lane*/, in dis_ESC_0F__SSE2()
12669 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12677 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12681 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12684 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12686 DIP("movlps %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12694 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12698 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12701 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12703 DIP("movlpd %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12716 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12721 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12724 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12730 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12743 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12749 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12752 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12758 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12774 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12780 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12782 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/, in dis_ESC_0F__SSE2()
12785 nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12791 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12796 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/, in dis_ESC_0F__SSE2()
12797 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ) ); in dis_ESC_0F__SSE2()
12798 DIP("movhps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12799 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12801 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12803 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/, in dis_ESC_0F__SSE2()
12806 nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12814 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12818 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12821 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12823 DIP("movhps %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12831 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12835 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12838 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12840 DIP("movhpd %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12853 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12862 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12880 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12884 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12885 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12886 DIP("movapd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12887 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12890 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12892 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12895 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12901 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12905 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12906 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12907 DIP("movaps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12908 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12911 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12913 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12916 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12925 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12929 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12930 getXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12931 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12932 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12935 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12937 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12938 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12945 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12949 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12950 getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_ESC_0F__SSE2()
12951 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12952 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12955 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12957 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12958 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12969 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12981 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12983 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12987 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12993 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13000 gregOfRexRM(pfx,modrm), 1, in dis_ESC_0F__SSE2()
13011 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
13018 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13020 DIP("cvtsi2ss %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13021 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13023 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13027 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13030 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13038 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13040 DIP("cvtsi2ssq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13041 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13043 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13047 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13050 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13061 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
13066 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13068 DIP("cvtsi2sdl %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13069 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13071 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13075 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13077 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13084 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13086 DIP("cvtsi2sdq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13087 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13089 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13093 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13096 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13108 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13123 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13125 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13129 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13133 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13138 gregOfRexRM(pfx,modrm), 1, in dis_ESC_0F__SSE2()
13149 if ( (haveNo66noF2noF3(pfx) && sz == 4) in dis_ESC_0F__SSE2()
13150 || (have66noF2noF3(pfx) && sz == 2) ) { in dis_ESC_0F__SSE2()
13153 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13155 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13158 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13172 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13184 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
13185 assign(f32hi, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 1)); in dis_ESC_0F__SSE2()
13187 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13190 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13234 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
13235 delta = dis_CVTxSS2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz); in dis_ESC_0F__SSE2()
13250 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
13251 delta = dis_CVTxSD2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz); in dis_ESC_0F__SSE2()
13258 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13270 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
13271 assign(f64hi, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 1)); in dis_ESC_0F__SSE2()
13273 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13276 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13310 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13311 delta = dis_COMISD( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F__SSE2()
13316 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13317 delta = dis_COMISS( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F__SSE2()
13325 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
13345 delta = dis_MOVMSKPS_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
13350 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) { in dis_ESC_0F__SSE2()
13356 delta = dis_MOVMSKPD_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
13363 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13364 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13369 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13370 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13375 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13376 delta = dis_SSE_E_to_G_unary_lo64( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13381 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13382 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13390 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13391 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13396 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13397 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13405 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13406 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13411 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13412 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13420 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13421 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andps", Iop_AndV128 ); in dis_ESC_0F__SSE2()
13425 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13426 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andpd", Iop_AndV128 ); in dis_ESC_0F__SSE2()
13433 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13434 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnps", in dis_ESC_0F__SSE2()
13439 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13440 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnpd", in dis_ESC_0F__SSE2()
13448 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13449 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orps", Iop_OrV128 ); in dis_ESC_0F__SSE2()
13453 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13454 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orpd", Iop_OrV128 ); in dis_ESC_0F__SSE2()
13461 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13462 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorpd", Iop_XorV128 ); in dis_ESC_0F__SSE2()
13466 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13467 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorps", Iop_XorV128 ); in dis_ESC_0F__SSE2()
13474 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13475 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addps", Iop_Add32Fx4 ); in dis_ESC_0F__SSE2()
13479 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13480 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "addss", Iop_Add32F0x4 ); in dis_ESC_0F__SSE2()
13484 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
13486 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "addsd", Iop_Add64F0x2 ); in dis_ESC_0F__SSE2()
13490 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13492 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addpd", Iop_Add64Fx2 ); in dis_ESC_0F__SSE2()
13499 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
13501 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "mulsd", Iop_Mul64F0x2 ); in dis_ESC_0F__SSE2()
13505 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13506 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "mulss", Iop_Mul32F0x4 ); in dis_ESC_0F__SSE2()
13510 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13511 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulps", Iop_Mul32Fx4 ); in dis_ESC_0F__SSE2()
13515 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13517 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulpd", Iop_Mul64Fx2 ); in dis_ESC_0F__SSE2()
13525 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13527 delta = dis_CVTPS2PD_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
13532 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13538 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
13539 DIP("cvtss2sd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13540 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13542 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13546 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13549 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13556 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13563 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
13564 DIP("cvtsd2ss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13565 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13567 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13571 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13576 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13587 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13588 delta = dis_CVTPD2PS_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
13598 if ( (have66noF2noF3(pfx) && sz == 2) in dis_ESC_0F__SSE2()
13599 || (haveF3no66noF2(pfx) && sz == 4) ) { in dis_ESC_0F__SSE2()
13601 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, False/*!isAvx*/, r2zero ); in dis_ESC_0F__SSE2()
13606 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13607 delta = dis_CVTDQ2PS_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
13614 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13615 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "subss", Iop_Sub32F0x4 ); in dis_ESC_0F__SSE2()
13619 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
13621 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "subsd", Iop_Sub64F0x2 ); in dis_ESC_0F__SSE2()
13625 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13626 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subps", Iop_Sub32Fx4 ); in dis_ESC_0F__SSE2()
13630 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13631 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subpd", Iop_Sub64Fx2 ); in dis_ESC_0F__SSE2()
13638 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13639 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minps", Iop_Min32Fx4 ); in dis_ESC_0F__SSE2()
13643 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13644 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "minss", Iop_Min32F0x4 ); in dis_ESC_0F__SSE2()
13648 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
13650 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "minsd", Iop_Min64F0x2 ); in dis_ESC_0F__SSE2()
13654 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13655 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minpd", Iop_Min64Fx2 ); in dis_ESC_0F__SSE2()
13662 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13663 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "divsd", Iop_Div64F0x2 ); in dis_ESC_0F__SSE2()
13667 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13668 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divps", Iop_Div32Fx4 ); in dis_ESC_0F__SSE2()
13672 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13673 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "divss", Iop_Div32F0x4 ); in dis_ESC_0F__SSE2()
13677 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13678 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divpd", Iop_Div64Fx2 ); in dis_ESC_0F__SSE2()
13685 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13686 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxps", Iop_Max32Fx4 ); in dis_ESC_0F__SSE2()
13690 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13691 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "maxss", Iop_Max32F0x4 ); in dis_ESC_0F__SSE2()
13695 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
13697 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "maxsd", Iop_Max64F0x2 ); in dis_ESC_0F__SSE2()
13701 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13702 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxpd", Iop_Max64Fx2 ); in dis_ESC_0F__SSE2()
13709 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13710 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13719 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13720 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13729 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13730 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13739 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13740 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13749 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13750 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13758 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13759 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13767 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13768 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13776 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13777 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13786 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13787 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13796 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13797 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13806 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13807 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13816 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13817 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13826 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13827 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13836 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13837 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13849 if (have66noF2noF3(pfx)) { in dis_ESC_0F__SSE2()
13857 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13858 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
13860 DIP("movd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13861 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13864 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13865 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
13867 DIP("movq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13868 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13871 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13874 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13880 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13887 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13892 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13893 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
13894 DIP("movdqa %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13895 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13898 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13900 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13903 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13908 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13912 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13913 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
13914 DIP("movdqu %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13915 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13918 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13919 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13922 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13931 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13932 delta = dis_PSHUFD_32x4( vbi, pfx, delta, False/*!writesYmm*/); in dis_ESC_0F__SSE2()
13937 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13953 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F__SSE2()
13975 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13976 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13982 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13983 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13991 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13994 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlw", Iop_ShrN16x8 ); in dis_ESC_0F__SSE2()
13998 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14001 delta = dis_SSE_shiftE_imm( pfx, delta, "psraw", Iop_SarN16x8 ); in dis_ESC_0F__SSE2()
14005 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14008 delta = dis_SSE_shiftE_imm( pfx, delta, "psllw", Iop_ShlN16x8 ); in dis_ESC_0F__SSE2()
14015 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14018 delta = dis_SSE_shiftE_imm( pfx, delta, "psrld", Iop_ShrN32x4 ); in dis_ESC_0F__SSE2()
14022 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14025 delta = dis_SSE_shiftE_imm( pfx, delta, "psrad", Iop_SarN32x4 ); in dis_ESC_0F__SSE2()
14029 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14032 delta = dis_SSE_shiftE_imm( pfx, delta, "pslld", Iop_ShlN32x4 ); in dis_ESC_0F__SSE2()
14040 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14044 Int reg = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__SSE2()
14054 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14058 Int reg = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__SSE2()
14068 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14071 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlq", Iop_ShrN64x2 ); in dis_ESC_0F__SSE2()
14075 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
14078 delta = dis_SSE_shiftE_imm( pfx, delta, "psllq", Iop_ShlN64x2 ); in dis_ESC_0F__SSE2()
14085 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14086 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14094 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14095 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14103 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14104 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14113 if (haveF3no66noF2(pfx) in dis_ESC_0F__SSE2()
14117 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
14118 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
14120 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1, mkU64(0) ); in dis_ESC_0F__SSE2()
14121 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14122 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14125 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14126 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) ); in dis_ESC_0F__SSE2()
14127 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
14130 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14137 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) { in dis_ESC_0F__SSE2()
14143 putIReg32( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
14144 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
14145 DIP("movd %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14146 nameIReg32(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14148 putIReg64( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
14149 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
14150 DIP("movq %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14151 nameIReg64(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14154 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14158 ? getXMMRegLane32(gregOfRexRM(pfx,modrm),0) in dis_ESC_0F__SSE2()
14159 : getXMMRegLane64(gregOfRexRM(pfx,modrm),0) ); in dis_ESC_0F__SSE2()
14161 nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf); in dis_ESC_0F__SSE2()
14169 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14174 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
14175 getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14176 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14177 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14179 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14181 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14182 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf); in dis_ESC_0F__SSE2()
14187 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14191 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
14192 getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14193 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14194 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14196 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14199 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14200 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf); in dis_ESC_0F__SSE2()
14208 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14221 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14235 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14246 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14265 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14268 delta = dis_STMXCSR(vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F__SSE2()
14272 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14275 delta = dis_LDMXCSR(vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F__SSE2()
14279 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
14281 && gregOfRexRM(pfx,getUChar(delta)) == 0) { in dis_ESC_0F__SSE2()
14282 delta = dis_FXSAVE(vbi, pfx, delta, sz); in dis_ESC_0F__SSE2()
14286 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
14288 && gregOfRexRM(pfx,getUChar(delta)) == 1) { in dis_ESC_0F__SSE2()
14289 delta = dis_FXRSTOR(vbi, pfx, delta, sz); in dis_ESC_0F__SSE2()
14293 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
14295 && gregOfRexRM(pfx,getUChar(delta)) == 4 in dis_ESC_0F__SSE2()
14297 delta = dis_XSAVE(vbi, pfx, delta, sz); in dis_ESC_0F__SSE2()
14301 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
14303 && gregOfRexRM(pfx,getUChar(delta)) == 5 in dis_ESC_0F__SSE2()
14305 delta = dis_XRSTOR(vbi, pfx, delta, sz); in dis_ESC_0F__SSE2()
14312 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14314 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpps", True, 4 ); in dis_ESC_0F__SSE2()
14318 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14320 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpss", False, 4 ); in dis_ESC_0F__SSE2()
14324 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14326 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpsd", False, 8 ); in dis_ESC_0F__SSE2()
14330 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14332 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmppd", True, 8 ); in dis_ESC_0F__SSE2()
14339 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
14342 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14343 storeLE( mkexpr(addr), getIRegG(sz, pfx, modrm) ); in dis_ESC_0F__SSE2()
14345 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE2()
14357 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14373 assign(t4, getIReg16(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14377 nameIReg16(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14380 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__SSE2()
14401 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14406 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14408 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14415 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F__SSE2()
14435 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
14451 putIReg64(gregOfRexRM(pfx,modrm), unop(Iop_16Uto64, mkexpr(t5))); in dis_ESC_0F__SSE2()
14453 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_16Uto32, mkexpr(t5))); in dis_ESC_0F__SSE2()
14457 sz==8 ? nameIReg64(gregOfRexRM(pfx,modrm)) in dis_ESC_0F__SSE2()
14458 : nameIReg32(gregOfRexRM(pfx,modrm)) in dis_ESC_0F__SSE2()
14470 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14473 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14482 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14487 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14490 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14496 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__SSE2()
14503 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) ); in dis_ESC_0F__SSE2()
14507 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14513 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14516 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14520 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14521 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14523 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__SSE2()
14529 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14533 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) ); in dis_ESC_0F__SSE2()
14540 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14541 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlw", Iop_ShrN16x8 ); in dis_ESC_0F__SSE2()
14548 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14549 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrld", Iop_ShrN32x4 ); in dis_ESC_0F__SSE2()
14556 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14557 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlq", Iop_ShrN64x2 ); in dis_ESC_0F__SSE2()
14564 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14565 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14571 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14574 vbi, pfx, delta, opc, "paddq", False ); in dis_ESC_0F__SSE2()
14581 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14582 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14591 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14595 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
14598 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14606 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14613 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14615 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
14616 DIP("movq %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf ); in dis_ESC_0F__SSE2()
14622 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14627 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
14628 DIP("movdq2q %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14642 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14645 delta = dis_PMOVMSKB_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
14652 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14661 putIReg32(gregOfRexRM(pfx,modrm), mkexpr(t1)); in dis_ESC_0F__SSE2()
14663 nameIReg32(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14673 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14674 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14682 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14683 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14692 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14695 vbi, pfx, delta, opc, "pminub", False ); in dis_ESC_0F__SSE2()
14699 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14700 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14708 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14709 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pand", Iop_AndV128 ); in dis_ESC_0F__SSE2()
14716 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14717 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14725 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14726 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14735 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14738 vbi, pfx, delta, opc, "pmaxub", False ); in dis_ESC_0F__SSE2()
14742 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14743 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14751 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14752 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "pandn", Iop_AndV128 ); in dis_ESC_0F__SSE2()
14760 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14763 vbi, pfx, delta, opc, "pavgb", False ); in dis_ESC_0F__SSE2()
14767 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14768 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14776 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14777 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psraw", Iop_SarN16x8 ); in dis_ESC_0F__SSE2()
14784 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14785 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrad", Iop_SarN32x4 ); in dis_ESC_0F__SSE2()
14793 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14796 vbi, pfx, delta, opc, "pavgw", False ); in dis_ESC_0F__SSE2()
14800 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14801 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14810 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14813 vbi, pfx, delta, opc, "pmuluh", False ); in dis_ESC_0F__SSE2()
14817 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14818 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14826 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14827 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14839 if ( (haveF2no66noF3(pfx) && sz == 4) in dis_ESC_0F__SSE2()
14840 || (have66noF2noF3(pfx) && sz == 2) ) { in dis_ESC_0F__SSE2()
14841 delta = dis_CVTxPD2DQ_128( vbi, pfx, delta, False/*!isAvx*/, in dis_ESC_0F__SSE2()
14847 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14848 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F__SSE2()
14860 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14864 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14874 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14877 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14879 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14881 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14891 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14892 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14900 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14901 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14910 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14913 vbi, pfx, delta, opc, "pminsw", False ); in dis_ESC_0F__SSE2()
14917 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14918 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14926 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14927 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "por", Iop_OrV128 ); in dis_ESC_0F__SSE2()
14934 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14935 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14943 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14944 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14953 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14956 vbi, pfx, delta, opc, "pmaxsw", False ); in dis_ESC_0F__SSE2()
14960 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14961 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14969 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14970 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pxor", Iop_XorV128 ); in dis_ESC_0F__SSE2()
14977 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14978 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllw", Iop_ShlN16x8 ); in dis_ESC_0F__SSE2()
14985 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14986 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "pslld", Iop_ShlN32x4 ); in dis_ESC_0F__SSE2()
14993 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14994 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllq", Iop_ShlN64x2 ); in dis_ESC_0F__SSE2()
15003 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15007 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
15010 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
15015 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
15026 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
15042 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
15060 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15064 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
15066 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
15071 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
15085 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
15088 vbi, pfx, delta, opc, "psadbw", False ); in dis_ESC_0F__SSE2()
15093 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15097 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
15099 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
15104 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
15119 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
15121 delta = dis_MMX( &ok, vbi, pfx, sz, delta-1 ); in dis_ESC_0F__SSE2()
15125 if (have66noF2noF3(pfx) && sz == 2 && epartIsReg(getUChar(delta))) { in dis_ESC_0F__SSE2()
15126 delta = dis_MASKMOVDQU( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
15133 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15134 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15142 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15143 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15151 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15152 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15160 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15161 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15167 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
15170 vbi, pfx, delta, opc, "psubq", False ); in dis_ESC_0F__SSE2()
15177 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15178 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15186 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15187 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15195 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
15196 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
15223 static Long dis_MOVDDUP_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVDDUP_128() argument
15232 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVDDUP_128()
15234 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVDDUP_128()
15241 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVDDUP_128()
15253 static Long dis_MOVDDUP_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVDDUP_256() argument
15262 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVDDUP_256()
15264 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVDDUP_256()
15270 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVDDUP_256()
15285 static Long dis_MOVSxDUP_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVSxDUP_128() argument
15293 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVSxDUP_128()
15297 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVSxDUP_128()
15303 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVSxDUP_128()
15319 static Long dis_MOVSxDUP_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVSxDUP_256() argument
15327 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVSxDUP_256()
15331 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVSxDUP_256()
15337 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVSxDUP_256()
15400 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F__SSE3() argument
15417 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
15418 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/, in dis_ESC_0F__SSE3()
15424 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE3()
15426 delta = dis_MOVDDUP_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE3()
15434 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
15435 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/, in dis_ESC_0F__SSE3()
15445 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
15451 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15453 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15458 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
15470 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE3()
15476 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15478 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15483 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
15497 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE3()
15501 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15503 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15508 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
15519 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
15523 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15527 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
15532 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
15546 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
15551 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
15552 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE3()
15555 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE3()
15689 static Long dis_PHADD_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_PHADD_128() argument
15706 UInt rG = gregOfRexRM(pfx,modrm); in dis_PHADD_128()
15707 UInt rV = isAvx ? getVexNvvvv(pfx) : rG; in dis_PHADD_128()
15726 UInt rE = eregOfRexRM(pfx,modrm); in dis_PHADD_128()
15732 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PHADD_128()
15763 static Long dis_PHADD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_PHADD_256() argument
15778 UInt rG = gregOfRexRM(pfx,modrm); in dis_PHADD_256()
15779 UInt rV = getVexNvvvv(pfx); in dis_PHADD_256()
15798 UInt rE = eregOfRexRM(pfx,modrm); in dis_PHADD_256()
15803 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PHADD_256()
15881 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F38__SupSSE3() argument
15897 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15903 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15906 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15908 DIP("pshufb %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
15909 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15911 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15916 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15920 putXMMReg(gregOfRexRM(pfx,modrm), mkexpr(res)); in dis_ESC_0F38__SupSSE3()
15924 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15938 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15981 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15983 delta = dis_PHADD_128( vbi, pfx, delta, False/*isAvx*/, opc ); in dis_ESC_0F38__SupSSE3()
15999 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
16032 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16053 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
16058 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__SupSSE3()
16063 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__SupSSE3()
16068 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16080 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
16098 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16136 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
16155 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
16158 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
16160 DIP("psign%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
16161 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
16163 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16168 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
16177 gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SupSSE3()
16188 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
16211 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16229 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
16239 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
16242 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
16244 DIP("pmulhrsw %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
16245 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
16247 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16252 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
16261 gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SupSSE3()
16271 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
16285 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16306 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
16321 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
16323 DIP("pabs%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
16324 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
16326 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16331 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
16334 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SupSSE3()
16341 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
16362 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
16400 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F3A__SupSSE3() argument
16417 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SupSSE3()
16423 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F3A__SupSSE3()
16426 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F3A__SupSSE3()
16430 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F3A__SupSSE3()
16431 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F3A__SupSSE3()
16433 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SupSSE3()
16440 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F3A__SupSSE3()
16444 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) ); in dis_ESC_0F3A__SupSSE3()
16448 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F3A__SupSSE3()
16465 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SupSSE3()
16527 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F__SSE4() argument
16546 if (haveF3noF2(pfx) /* so both 66 and REX.W are possibilities */ in dis_ESC_0F__SSE4()
16552 assign(src, getIRegE(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16554 DIP("popcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm), in dis_ESC_0F__SSE4()
16555 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16557 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0); in dis_ESC_0F__SSE4()
16561 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16565 putIRegG(sz, pfx, modrm, mkexpr(result)); in dis_ESC_0F__SSE4()
16590 if (haveF3noF2(pfx) /* so both 66 and 48 are possibilities */ in dis_ESC_0F__SSE4()
16597 assign(src, getIRegE(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16599 DIP("tzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm), in dis_ESC_0F__SSE4()
16600 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16602 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0); in dis_ESC_0F__SSE4()
16606 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16610 putIRegG(sz, pfx, modrm, mkexpr(res)); in dis_ESC_0F__SSE4()
16651 if (haveF3noF2(pfx) /* so both 66 and 48 are possibilities */ in dis_ESC_0F__SSE4()
16658 assign(src, getIRegE(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16660 DIP("lzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm), in dis_ESC_0F__SSE4()
16661 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16663 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0); in dis_ESC_0F__SSE4()
16667 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16671 putIRegG(sz, pfx, modrm, mkexpr(res)); in dis_ESC_0F__SSE4()
16779 static Long dis_VBLENDV_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_VBLENDV_128() argument
16786 UInt rG = gregOfRexRM(pfx, modrm); in dis_VBLENDV_128()
16787 UInt rV = getVexNvvvv(pfx); in dis_VBLENDV_128()
16794 UInt rE = eregOfRexRM(pfx, modrm); in dis_VBLENDV_128()
16802 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_VBLENDV_128()
16818 static Long dis_VBLENDV_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_VBLENDV_256() argument
16825 UInt rG = gregOfRexRM(pfx, modrm); in dis_VBLENDV_256()
16826 UInt rV = getVexNvvvv(pfx); in dis_VBLENDV_256()
16833 UInt rE = eregOfRexRM(pfx, modrm); in dis_VBLENDV_256()
16841 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_VBLENDV_256()
16957 static Long dis_xTESTy_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_xTESTy_128() argument
16964 UInt rG = gregOfRexRM(pfx, modrm); in dis_xTESTy_128()
16969 UInt rE = eregOfRexRM(pfx, modrm); in dis_xTESTy_128()
16977 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_xTESTy_128()
17010 static Long dis_xTESTy_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_xTESTy_256() argument
17017 UInt rG = gregOfRexRM(pfx, modrm); in dis_xTESTy_256()
17022 UInt rE = eregOfRexRM(pfx, modrm); in dis_xTESTy_256()
17029 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_xTESTy_256()
17068 static Long dis_PMOVxXBW_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBW_128() argument
17078 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBW_128()
17080 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBW_128()
17085 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBW_128()
17111 static Long dis_PMOVxXBW_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBW_256() argument
17120 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBW_256()
17122 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBW_256()
17127 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBW_256()
17151 static Long dis_PMOVxXWD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXWD_128() argument
17161 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXWD_128()
17164 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXWD_128()
17169 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXWD_128()
17184 ( gregOfRexRM(pfx, modrm), res ); in dis_PMOVxXWD_128()
17190 static Long dis_PMOVxXWD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXWD_256() argument
17199 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXWD_256()
17202 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXWD_256()
17207 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXWD_256()
17229 static Long dis_PMOVSXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVSXWQ_128() argument
17238 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_128()
17241 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_128()
17246 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXWQ_128()
17262 static Long dis_PMOVSXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_PMOVSXWQ_256() argument
17269 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_256()
17274 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_256()
17279 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXWQ_256()
17297 static Long dis_PMOVZXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXWQ_128() argument
17306 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_128()
17309 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_128()
17314 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXWQ_128()
17333 static Long dis_PMOVZXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXWQ_256() argument
17341 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_256()
17344 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_256()
17349 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXWQ_256()
17373 static Long dis_PMOVxXDQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXDQ_128() argument
17384 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_128()
17390 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_128()
17396 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXDQ_128()
17420 static Long dis_PMOVxXDQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXDQ_256() argument
17429 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_256()
17435 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_256()
17440 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXDQ_256()
17473 static Long dis_PMOVxXBD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBD_128() argument
17483 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBD_128()
17485 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBD_128()
17490 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBD_128()
17516 static Long dis_PMOVxXBD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBD_256() argument
17525 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBD_256()
17527 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBD_256()
17532 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBD_256()
17563 static Long dis_PMOVSXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVSXBQ_128() argument
17572 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_128()
17574 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_128()
17579 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXBQ_128()
17596 static Long dis_PMOVSXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVSXBQ_256() argument
17604 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_256()
17606 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_256()
17611 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXBQ_256()
17642 static Long dis_PMOVZXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXBQ_128() argument
17651 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_128()
17653 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_128()
17658 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXBQ_128()
17681 static Long dis_PMOVZXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXBQ_256() argument
17689 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_256()
17691 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_256()
17696 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXBQ_256()
17725 static Long dis_PHMINPOSUW_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PHMINPOSUW_128() argument
17737 UInt rG = gregOfRexRM(pfx,modrm); in dis_PHMINPOSUW_128()
17739 UInt rE = eregOfRexRM(pfx,modrm); in dis_PHMINPOSUW_128()
17744 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PHMINPOSUW_128()
17765 static Long dis_AESx ( const VexAbiInfo* vbi, Prefix pfx, in dis_AESx() argument
17772 UInt rG = gregOfRexRM(pfx, modrm); in dis_AESx()
17774 UInt regNoR = (isAvx && opc != 0xDB) ? getVexNvvvv(pfx) : rG; in dis_AESx()
17783 regNoL = eregOfRexRM(pfx, modrm); in dis_AESx()
17787 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AESx()
17857 static Long dis_AESKEYGENASSIST ( const VexAbiInfo* vbi, Prefix pfx, in dis_AESKEYGENASSIST() argument
17865 UInt regNoR = gregOfRexRM(pfx, modrm); in dis_AESKEYGENASSIST()
17871 regNoL = eregOfRexRM(pfx, modrm); in dis_AESKEYGENASSIST()
17876 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_AESKEYGENASSIST()
17926 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F38__SSE4() argument
17949 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17973 assign(vecE, getXMMReg(eregOfRexRM(pfx, modrm))); in dis_ESC_0F38__SSE4()
17976 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F38__SSE4()
17977 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17979 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17984 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17987 assign(vecG, getXMMReg(gregOfRexRM(pfx, modrm))); in dis_ESC_0F38__SSE4()
17991 putXMMReg(gregOfRexRM(pfx, modrm), mkexpr(res)); in dis_ESC_0F38__SSE4()
18000 if (have66noF2noF3(pfx) in dis_ESC_0F38__SSE4()
18002 delta = dis_xTESTy_128( vbi, pfx, delta, False/*!isAvx*/, 0 ); in dis_ESC_0F38__SSE4()
18010 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18011 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18020 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18021 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18030 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18031 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
18039 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18040 delta = dis_PMOVxXWD_128(vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18049 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18050 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
18058 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18059 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18071 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18075 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__SSE4()
18078 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__SSE4()
18083 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
18097 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18099 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18109 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18112 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
18114 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SSE4()
18117 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SSE4()
18127 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18135 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
18138 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F38__SSE4()
18139 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
18141 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
18146 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
18149 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) )); in dis_ESC_0F38__SSE4()
18151 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F38__SSE4()
18162 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18163 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18172 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18173 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18182 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18183 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
18191 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18192 delta = dis_PMOVxXWD_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18201 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18202 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
18210 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18211 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18221 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18223 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18234 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18238 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18254 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18258 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18274 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18278 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18294 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18298 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
18310 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18318 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
18321 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F38__SSE4()
18322 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
18324 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
18329 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
18332 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) )); in dis_ESC_0F38__SSE4()
18334 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F38__SSE4()
18344 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18345 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
18361 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
18362 delta = dis_AESx( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F38__SSE4()
18373 if (haveF2noF3(pfx) in dis_ESC_0F38__SSE4()
18374 && (opc == 0xF1 || (opc == 0xF0 && !have66(pfx)))) { in dis_ESC_0F38__SSE4()
18386 assign(valE, getIRegE(sz, pfx, modrm)); in dis_ESC_0F38__SSE4()
18388 DIP("crc32b %s,%s\n", nameIRegE(sz, pfx, modrm), in dis_ESC_0F38__SSE4()
18389 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm)); in dis_ESC_0F38__SSE4()
18391 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
18395 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm)); in dis_ESC_0F38__SSE4()
18403 assign(valG0, binop(Iop_And64, getIRegG(8, pfx, modrm), in dis_ESC_0F38__SSE4()
18425 putIRegG(4, pfx, modrm, unop(Iop_64to32, mkexpr(valG1))); in dis_ESC_0F38__SSE4()
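
The dis_ESC_0F38__SSE4 cases above all begin with the same gate: the 0x66 prefix must be present and F2/F3 must be absent before an arm is decoded. Here is a minimal standalone sketch of that predicate style, with invented bit assignments; the real PFX_* constants are defined earlier in the file.

    /* Sketch of the "66 required, F2/F3 forbidden" gate used by the SSE4
       arms above.  The bit values below are invented for the sketch. */
    #include <stdio.h>

    #define MYPFX_66 (1u << 0)   /* a 0x66 prefix byte was seen */
    #define MYPFX_F2 (1u << 1)   /* a 0xF2 (REPNE) prefix byte was seen */
    #define MYPFX_F3 (1u << 2)   /* a 0xF3 (REP) prefix byte was seen */

    static int only66(unsigned pfx) {
       /* exactly 66, and neither F2 nor F3 */
       return (pfx & (MYPFX_66 | MYPFX_F2 | MYPFX_F3)) == MYPFX_66;
    }

    int main(void) {
       printf("%d\n", only66(MYPFX_66));             /* 1: decode this arm */
       printf("%d\n", only66(MYPFX_66 | MYPFX_F3));  /* 0: reject, fall through */
       return 0;
    }

The accompanying sz == 2 test in the cases above reflects that, by the time these handlers run, the 0x66 prefix has already reduced the default operand size to 2 bytes.
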
18451 static Long dis_PEXTRW ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRW() argument
18462 UInt rG = gregOfRexRM(pfx,modrm); in dis_PEXTRW()
18468 vassert(0==getRexW(pfx)); /* ensured by caller */ in dis_PEXTRW()
18475 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRW()
18492 UInt rE = eregOfRexRM(pfx,modrm); in dis_PEXTRW()
18506 static Long dis_PEXTRD ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRD() argument
18523 vassert(0==getRexW(pfx)); /* ensured by caller */ in dis_PEXTRD()
18525 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_PEXTRD()
18531 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRD()
18544 putIReg32( eregOfRexRM(pfx,modrm), mkexpr(src_dword) ); in dis_PEXTRD()
18547 nameXMMReg( gregOfRexRM(pfx, modrm) ), in dis_PEXTRD()
18548 nameIReg32( eregOfRexRM(pfx, modrm) ) ); in dis_PEXTRD()
18553 imm8_10, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf ); in dis_PEXTRD()
18559 static Long dis_PEXTRQ ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRQ() argument
18572 vassert(1==getRexW(pfx)); /* ensured by caller */ in dis_PEXTRQ()
18574 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_PEXTRQ()
18579 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRQ()
18592 putIReg64( eregOfRexRM(pfx,modrm), mkexpr(src_qword) ); in dis_PEXTRQ()
18595 nameXMMReg( gregOfRexRM(pfx, modrm) ), in dis_PEXTRQ()
18596 nameIReg64( eregOfRexRM(pfx, modrm) ) ); in dis_PEXTRQ()
18601 imm8_0, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf ); in dis_PEXTRQ()
18740 static Long dis_PCMPxSTRx ( const VexAbiInfo* vbi, Prefix pfx, in dis_PCMPxSTRx() argument
18760 regNoL = eregOfRexRM(pfx, modrm); in dis_PCMPxSTRx()
18761 regNoR = gregOfRexRM(pfx, modrm); in dis_PCMPxSTRx()
18766 regNoR = gregOfRexRM(pfx, modrm); in dis_PCMPxSTRx()
18767 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PCMPxSTRx()
18992 static Long dis_PEXTRB_128_GtoE ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRB_128_GtoE() argument
19005 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_PEXTRB_128_GtoE()
19012 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRB_128_GtoE()
19026 putIReg64( eregOfRexRM(pfx,modrm), in dis_PEXTRB_128_GtoE()
19031 nameXMMReg( gregOfRexRM(pfx, modrm) ), in dis_PEXTRB_128_GtoE()
19032 nameIReg64( eregOfRexRM(pfx, modrm) ) ); in dis_PEXTRB_128_GtoE()
19037 imm8, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf ); in dis_PEXTRB_128_GtoE()
19168 static Long dis_EXTRACTPS ( const VexAbiInfo* vbi, Prefix pfx, in dis_EXTRACTPS() argument
19178 UInt rG = gregOfRexRM(pfx,modrm); in dis_EXTRACTPS()
19188 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_EXTRACTPS()
19201 UInt rE = eregOfRexRM(pfx,modrm); in dis_EXTRACTPS()
19248 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F3A__SSE4() argument
19264 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19281 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) ); in dis_ESC_0F3A__SSE4()
19283 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 1 ) ); in dis_ESC_0F3A__SSE4()
19285 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 2 ) ); in dis_ESC_0F3A__SSE4()
19287 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 3 ) ); in dis_ESC_0F3A__SSE4()
19292 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
19293 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19295 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19309 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19323 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) ); in dis_ESC_0F3A__SSE4()
19324 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) ); in dis_ESC_0F3A__SSE4()
19325 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 2, mkexpr(res2) ); in dis_ESC_0F3A__SSE4()
19326 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 3, mkexpr(res3) ); in dis_ESC_0F3A__SSE4()
19334 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19347 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 ) ); in dis_ESC_0F3A__SSE4()
19349 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 1 ) ); in dis_ESC_0F3A__SSE4()
19354 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
19355 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19357 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19367 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19379 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) ); in dis_ESC_0F3A__SSE4()
19380 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) ); in dis_ESC_0F3A__SSE4()
19391 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19402 isD ? getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 ) in dis_ESC_0F3A__SSE4()
19403 : getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) ); in dis_ESC_0F3A__SSE4()
19409 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
19410 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19412 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19419 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19432 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) ); in dis_ESC_0F3A__SSE4()
19434 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) ); in dis_ESC_0F3A__SSE4()
19443 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19451 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19455 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19458 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
19459 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19461 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19468 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19471 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F3A__SSE4()
19480 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19487 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19491 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19494 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
19495 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19497 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19504 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19507 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F3A__SSE4()
19516 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19524 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19528 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19531 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
19532 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19534 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19541 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
19544 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F3A__SSE4()
19554 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19555 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
19564 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19565 delta = dis_PEXTRW( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
19575 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19577 delta = dis_PEXTRD( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
19584 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19586 delta = dis_PEXTRQ( vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F3A__SSE4()
19596 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19598 delta = dis_EXTRACTPS( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
19606 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19610 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19612 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__SSE4()
19619 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19627 assign(src_vec, getXMMReg( gregOfRexRM(pfx, modrm) )); in dis_ESC_0F3A__SSE4()
19637 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19643 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19646 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19657 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19676 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19681 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19684 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__SSE4()
19691 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19707 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19712 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19715 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__SSE4()
19722 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19741 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19746 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19749 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19756 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19774 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19779 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19782 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19789 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19807 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19812 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19817 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19825 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19844 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19850 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19855 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19862 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19888 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19890 delta = dis_PCMPxSTRx( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F3A__SSE4()
19898 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19899 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
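
The PEXTRB/PEXTRW/PEXTRD/PEXTRQ and EXTRACTPS paths above all read an imm8 after the ModRM operand (the trailing 1 passed to disAMode in these calls appears to account for that extra immediate byte) and use its low bits as a lane index into the XMM source. A standalone sketch of the PEXTRD-style selection, using a plain array in place of an XMM register:

    /* Sketch of PEXTRD-style selection: imm8's low two bits pick one
       32-bit lane of the 128-bit source. */
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t pextrd_like(const uint32_t xmm[4], uint8_t imm8) {
       return xmm[imm8 & 3];              /* dword lane index = imm8 mod 4 */
    }

    int main(void) {
       uint32_t xmm[4] = { 0x11111111u, 0x22222222u, 0x33333333u, 0x44444444u };
       printf("0x%08x\n", pextrd_like(xmm, 2));   /* prints 0x33333333 */
       return 0;
    }
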
19935 Prefix pfx, Int sz, Long deltaIN in dis_ESC_NONE() argument
19964 Bool validF2orF3 = !haveF2orF3(pfx); in dis_ESC_NONE()
19975 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_ESC_NONE()
19992 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagNone, True, 1, delta, "add" ); in dis_ESC_NONE()
19996 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagNone, True, sz, delta, "add" ); in dis_ESC_NONE()
20000 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20001 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagNone, True, 1, delta, "add" ); in dis_ESC_NONE()
20004 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20005 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagNone, True, sz, delta, "add" ); in dis_ESC_NONE()
20009 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20013 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20019 delta = dis_op2_G_E ( vbi, pfx, Iop_Or8, WithFlagNone, True, 1, delta, "or" ); in dis_ESC_NONE()
20023 delta = dis_op2_G_E ( vbi, pfx, Iop_Or8, WithFlagNone, True, sz, delta, "or" ); in dis_ESC_NONE()
20027 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20028 delta = dis_op2_E_G ( vbi, pfx, Iop_Or8, WithFlagNone, True, 1, delta, "or" ); in dis_ESC_NONE()
20031 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20032 delta = dis_op2_E_G ( vbi, pfx, Iop_Or8, WithFlagNone, True, sz, delta, "or" ); in dis_ESC_NONE()
20036 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20040 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20046 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagCarry, True, 1, delta, "adc" ); in dis_ESC_NONE()
20050 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagCarry, True, sz, delta, "adc" ); in dis_ESC_NONE()
20054 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20055 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagCarry, True, 1, delta, "adc" ); in dis_ESC_NONE()
20058 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20059 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagCarry, True, sz, delta, "adc" ); in dis_ESC_NONE()
20063 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20067 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20073 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, 1, delta, "sbb" ); in dis_ESC_NONE()
20077 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, sz, delta, "sbb" ); in dis_ESC_NONE()
20081 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20082 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, 1, delta, "sbb" ); in dis_ESC_NONE()
20085 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20086 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, sz, delta, "sbb" ); in dis_ESC_NONE()
20090 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20094 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20100 delta = dis_op2_G_E ( vbi, pfx, Iop_And8, WithFlagNone, True, 1, delta, "and" ); in dis_ESC_NONE()
20104 delta = dis_op2_G_E ( vbi, pfx, Iop_And8, WithFlagNone, True, sz, delta, "and" ); in dis_ESC_NONE()
20108 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20109 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, True, 1, delta, "and" ); in dis_ESC_NONE()
20112 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20113 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, True, sz, delta, "and" ); in dis_ESC_NONE()
20117 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20121 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20127 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, True, 1, delta, "sub" ); in dis_ESC_NONE()
20131 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, True, sz, delta, "sub" ); in dis_ESC_NONE()
20135 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20136 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, True, 1, delta, "sub" ); in dis_ESC_NONE()
20139 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20140 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, True, sz, delta, "sub" ); in dis_ESC_NONE()
20144 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20148 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20154 delta = dis_op2_G_E ( vbi, pfx, Iop_Xor8, WithFlagNone, True, 1, delta, "xor" ); in dis_ESC_NONE()
20158 delta = dis_op2_G_E ( vbi, pfx, Iop_Xor8, WithFlagNone, True, sz, delta, "xor" ); in dis_ESC_NONE()
20162 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20163 delta = dis_op2_E_G ( vbi, pfx, Iop_Xor8, WithFlagNone, True, 1, delta, "xor" ); in dis_ESC_NONE()
20166 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20167 delta = dis_op2_E_G ( vbi, pfx, Iop_Xor8, WithFlagNone, True, sz, delta, "xor" ); in dis_ESC_NONE()
20171 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20175 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20180 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20181 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, False, 1, delta, "cmp" ); in dis_ESC_NONE()
20184 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20185 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, False, sz, delta, "cmp" ); in dis_ESC_NONE()
20189 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20190 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, False, 1, delta, "cmp" ); in dis_ESC_NONE()
20193 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20194 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, False, sz, delta, "cmp" ); in dis_ESC_NONE()
20198 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20202 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20217 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20224 assign(t1, getIRegRexB(sz, pfx, opc-0x50)); in dis_ESC_NONE()
20228 DIP("push%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x50)); in dis_ESC_NONE()
20239 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20248 putIRegRexB(sz, pfx, opc-0x58, mkexpr(t1)); in dis_ESC_NONE()
20249 DIP("pop%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x58)); in dis_ESC_NONE()
20253 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20254 if (haveREX(pfx) && 1==getRexW(pfx)) { in dis_ESC_NONE()
20260 putIRegG(8, pfx, modrm, in dis_ESC_NONE()
20262 getIRegE(4, pfx, modrm))); in dis_ESC_NONE()
20264 nameIRegE(4, pfx, modrm), in dis_ESC_NONE()
20265 nameIRegG(8, pfx, modrm)); in dis_ESC_NONE()
20268 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_NONE()
20270 putIRegG(8, pfx, modrm, in dis_ESC_NONE()
20274 nameIRegG(8, pfx, modrm)); in dis_ESC_NONE()
20282 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20290 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20291 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, sz ); in dis_ESC_NONE()
20295 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20315 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, 1 ); in dis_ESC_NONE()
20336 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20337 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
20400 if (epartIsReg(modrm) && haveF2orF3(pfx)) in dis_ESC_NONE()
20402 if (!epartIsReg(modrm) && haveF2andF3(pfx)) in dis_ESC_NONE()
20404 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx)) in dis_ESC_NONE()
20406 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20410 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 ); in dis_ESC_NONE()
20416 if (epartIsReg(modrm) && haveF2orF3(pfx)) in dis_ESC_NONE()
20418 if (!epartIsReg(modrm) && haveF2andF3(pfx)) in dis_ESC_NONE()
20420 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx)) in dis_ESC_NONE()
20422 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20425 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 ); in dis_ESC_NONE()
20429 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20431 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20434 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 ); in dis_ESC_NONE()
20438 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20439 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, False, in dis_ESC_NONE()
20444 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20445 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, False, in dis_ESC_NONE()
20461 if (haveF2orF3(pfx)) { in dis_ESC_NONE()
20465 if (haveF2andF3(pfx)) in dis_ESC_NONE()
20472 assign(t1, getIRegE(sz, pfx, modrm)); in dis_ESC_NONE()
20473 assign(t2, getIRegG(sz, pfx, modrm)); in dis_ESC_NONE()
20474 putIRegG(sz, pfx, modrm, mkexpr(t1)); in dis_ESC_NONE()
20475 putIRegE(sz, pfx, modrm, mkexpr(t2)); in dis_ESC_NONE()
20478 nameISize(sz), nameIRegG(sz, pfx, modrm), in dis_ESC_NONE()
20479 nameIRegE(sz, pfx, modrm)); in dis_ESC_NONE()
20482 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_NONE()
20484 assign( t2, getIRegG(sz, pfx, modrm) ); in dis_ESC_NONE()
20487 putIRegG( sz, pfx, modrm, mkexpr(t1) ); in dis_ESC_NONE()
20490 nameIRegG(sz, pfx, modrm), dis_buf); in dis_ESC_NONE()
20497 delta = dis_mov_G_E(vbi, pfx, 1, delta, &ok); in dis_ESC_NONE()
20505 delta = dis_mov_G_E(vbi, pfx, sz, delta, &ok); in dis_ESC_NONE()
20511 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20512 delta = dis_mov_E_G(vbi, pfx, 1, delta); in dis_ESC_NONE()
20516 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20517 delta = dis_mov_E_G(vbi, pfx, sz, delta); in dis_ESC_NONE()
20521 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20522 delta = dis_mov_S_E(vbi, pfx, sz, delta); in dis_ESC_NONE()
20526 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20535 addr = disAMode ( &alen, vbi, clearSegBits(pfx), delta, dis_buf, 0 ); in dis_ESC_NONE()
20540 putIRegG( sz, pfx, modrm, in dis_ESC_NONE()
20546 nameIRegG(sz,pfx,modrm)); in dis_ESC_NONE()
20554 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20580 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_NONE()
20591 if (!have66(pfx) && !haveF2(pfx) && haveF3(pfx)) { in dis_ESC_NONE()
20601 !haveF2orF3(pfx) in dis_ESC_NONE()
20603 && getRexB(pfx)==0 ) { in dis_ESC_NONE()
20616 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20617 codegen_xchg_rAX_Reg ( pfx, sz, opc - 0x90 ); in dis_ESC_NONE()
20621 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20640 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20662 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20718 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20791 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20801 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) ); in dis_ESC_NONE()
20804 segRegTxt(pfx), (ULong)d64, in dis_ESC_NONE()
20809 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20819 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) ); in dis_ESC_NONE()
20822 segRegTxt(pfx), (ULong)d64); in dis_ESC_NONE()
20828 if (haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20833 guest_RIP_bbstart+delta, "rep movs", pfx ); in dis_ESC_NONE()
20838 if (!haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20841 dis_string_op( dis_MOVS, sz, "movs", pfx ); in dis_ESC_NONE()
20849 if (haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20854 guest_RIP_bbstart+delta, "repe cmps", pfx ); in dis_ESC_NONE()
20863 if (haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20868 guest_RIP_bbstart+delta, "rep stos", pfx ); in dis_ESC_NONE()
20873 if (!haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20876 dis_string_op( dis_STOS, sz, "stos", pfx ); in dis_ESC_NONE()
20882 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20886 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20892 dis_string_op( dis_LODS, ( opc == 0xAC ? 1 : sz ), "lods", pfx ); in dis_ESC_NONE()
20898 if (haveF2(pfx) && !haveF3(pfx)) { in dis_ESC_NONE()
20903 guest_RIP_bbstart+delta, "repne scas", pfx ); in dis_ESC_NONE()
20908 if (!haveF2(pfx) && haveF3(pfx)) { in dis_ESC_NONE()
20913 guest_RIP_bbstart+delta, "repe scas", pfx ); in dis_ESC_NONE()
20918 if (!haveF2(pfx) && !haveF3(pfx)) { in dis_ESC_NONE()
20921 dis_string_op( dis_SCAS, sz, "scas", pfx ); in dis_ESC_NONE()
20935 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20938 putIRegRexB(1, pfx, opc-0xB0, mkU8(d64)); in dis_ESC_NONE()
20939 DIP("movb $%lld,%s\n", d64, nameIRegRexB(1,pfx,opc-0xB0)); in dis_ESC_NONE()
20952 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20956 putIRegRexB(8, pfx, opc-0xB8, mkU64(d64)); in dis_ESC_NONE()
20958 nameIRegRexB(8,pfx,opc-0xB8)); in dis_ESC_NONE()
20962 putIRegRexB(sz, pfx, opc-0xB8, in dis_ESC_NONE()
20966 nameIRegRexB(sz,pfx,opc-0xB8)); in dis_ESC_NONE()
20972 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20974 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20978 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20986 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20988 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20991 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20998 if (have66orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20999 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
21007 if (have66(pfx)) goto decode_failure; in dis_ESC_NONE()
21009 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
21011 DIP(haveF3(pfx) ? "rep ; ret\n" : "ret\n"); in dis_ESC_NONE()
21024 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21028 putIRegE(sz, pfx, modrm, in dis_ESC_NONE()
21032 nameIRegE(sz,pfx,modrm)); in dis_ESC_NONE()
21034 if (haveF2(pfx)) goto decode_failure; in dis_ESC_NONE()
21036 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_NONE()
21048 if (opc == 0xC7 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 4 in dis_ESC_NONE()
21068 if (opc == 0xC6 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 1 in dis_ESC_NONE()
21148 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21150 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
21154 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
21162 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21164 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
21167 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
21175 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21177 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
21180 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
21188 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21190 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
21192 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
21208 if (haveF2orF3(pfx)) in dis_ESC_NONE()
21237 delta = dis_FPU ( &decode_OK, vbi, pfx, delta ); in dis_ESC_NONE()
21254 if (have66orF2orF3(pfx) || 1==getRexW(pfx)) goto decode_failure; in dis_ESC_NONE()
21261 if (haveASO(pfx)) { in dis_ESC_NONE()
21296 DIP("loop%s%s 0x%llx\n", xtra, haveASO(pfx) ? "l" : "", (ULong)d64); in dis_ESC_NONE()
21302 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21305 if (haveASO(pfx)) { in dis_ESC_NONE()
21360 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21409 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21425 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21426 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
21449 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21452 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
21466 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21469 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
21519 delta = dis_Grp3 ( vbi, pfx, 1, delta, &decode_OK ); in dis_ESC_NONE()
21528 delta = dis_Grp3 ( vbi, pfx, sz, delta, &decode_OK ); in dis_ESC_NONE()
21534 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21540 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
21549 delta = dis_Grp4 ( vbi, pfx, delta, &decode_OK ); in dis_ESC_NONE()
21558 delta = dis_Grp5 ( vbi, pfx, sz, delta, dres, &decode_OK ); in dis_ESC_NONE()
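
The push/pop (0x50+r, 0x58+r) and mov-immediate (0xB0+r, 0xB8+r) cases above pull the register number out of the opcode itself and widen it with REX.B via getIRegRexB/putIRegRexB/nameIRegRexB. A standalone sketch of that numbering, assuming only the architectural encoding:

    /* Sketch of opcode-embedded register selection: the low three opcode
       bits name a register and REX.B supplies the fourth bit, reaching
       r8..r15. */
    #include <stdio.h>

    static const char* reg64_name[16] = {
       "rax","rcx","rdx","rbx","rsp","rbp","rsi","rdi",
       "r8","r9","r10","r11","r12","r13","r14","r15"
    };

    static unsigned full_regno(unsigned char opc, int rexB) {
       return (opc & 7) | ((rexB & 1) << 3);
    }

    int main(void) {
       /* 0x50+rd is PUSH r64; with REX.B set the same low bits select r8..r15 */
       printf("push %s\n", reg64_name[full_regno(0x55, 0)]);  /* push rbp */
       printf("push %s\n", reg64_name[full_regno(0x55, 1)]);  /* push r13 */
       return 0;
    }
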
21666 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F() argument
21695 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21819 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21824 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21845 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21861 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21886 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21887 delta = dis_cmov_E_G(vbi, pfx, sz, (AMD64Condcode)(opc - 0x40), delta); in dis_ESC_0F()
21908 if (haveF3(pfx)) goto decode_failure; in dis_ESC_0F()
21909 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_0F()
21984 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21990 putIRegE(1, pfx, modrm, mkexpr(t1)); in dis_ESC_0F()
21992 nameIRegE(1,pfx,modrm)); in dis_ESC_0F()
21994 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
22021 int bnd = gregOfRexRM(pfx,modrm); in dis_ESC_0F()
22024 oper = nameIReg64 (eregOfRexRM(pfx,modrm)); in dis_ESC_0F()
22027 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
22032 if (haveF3no66noF2 (pfx)) { in dis_ESC_0F()
22038 } else if (haveF2no66noF3 (pfx)) { in dis_ESC_0F()
22044 } else if (have66noF2noF3 (pfx)) { in dis_ESC_0F()
22050 } else if (haveNo66noF2noF3 (pfx)) { in dis_ESC_0F()
22070 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22132 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpNone, &ok ); in dis_ESC_0F()
22139 d64 = delta + lengthAMode(pfx, delta); in dis_ESC_0F()
22142 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
22150 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
22159 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpSet, &ok ); in dis_ESC_0F()
22166 d64 = delta + lengthAMode(pfx, delta); in dis_ESC_0F()
22169 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
22177 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
22183 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22184 delta = dis_mul_E_G ( vbi, pfx, sz, delta ); in dis_ESC_0F()
22190 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, 1, delta ); in dis_ESC_0F()
22199 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, sz, delta ); in dis_ESC_0F()
22208 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpReset, &ok ); in dis_ESC_0F()
22214 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22217 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, False ); in dis_ESC_0F()
22221 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22224 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, False ); in dis_ESC_0F()
22231 am_sz = lengthAMode(pfx,delta); in dis_ESC_0F()
22233 delta = dis_Grp8_Imm ( vbi, pfx, delta, modrm, am_sz, sz, d64, in dis_ESC_0F()
22244 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpComp, &ok ); in dis_ESC_0F()
22250 if (!haveF2orF3(pfx) in dis_ESC_0F()
22251 || (haveF3noF2(pfx) in dis_ESC_0F()
22255 delta = dis_bs_E_G ( vbi, pfx, sz, delta, True ); in dis_ESC_0F()
22263 if (!haveF2orF3(pfx) in dis_ESC_0F()
22264 || (haveF3noF2(pfx) in dis_ESC_0F()
22268 delta = dis_bs_E_G ( vbi, pfx, sz, delta, False ); in dis_ESC_0F()
22276 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22279 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, True ); in dis_ESC_0F()
22283 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22286 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, True ); in dis_ESC_0F()
22291 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, 1, delta ); in dis_ESC_0F()
22299 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, sz, delta ); in dis_ESC_0F()
22330 if (have66(pfx)) goto decode_failure; in dis_ESC_0F()
22337 if (haveF2orF3(pfx)) { in dis_ESC_0F()
22342 if (haveF2andF3(pfx) || !haveLOCK(pfx)) goto decode_failure; in dis_ESC_0F()
22345 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
22450 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
22455 assign( t1, getIRegRexB(4, pfx, opc-0xC8) ); in dis_ESC_0F()
22457 putIRegRexB(4, pfx, opc-0xC8, mkexpr(t2)); in dis_ESC_0F()
22458 DIP("bswapl %s\n", nameIRegRexB(4, pfx, opc-0xC8)); in dis_ESC_0F()
22464 assign( t1, getIRegRexB(8, pfx, opc-0xC8) ); in dis_ESC_0F()
22466 putIRegRexB(8, pfx, opc-0xC8, mkexpr(t2)); in dis_ESC_0F()
22467 DIP("bswapq %s\n", nameIRegRexB(8, pfx, opc-0xC8)); in dis_ESC_0F()
22481 if (!have66orF2orF3(pfx)) { in dis_ESC_0F()
22558 delta = dis_MMX ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F()
22586 archinfo, vbi, pfx, sz, deltaIN, dres ); in dis_ESC_0F()
22596 delta = dis_ESC_0F__SSE3 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F()
22607 archinfo, vbi, pfx, sz, deltaIN ); in dis_ESC_0F()
22632 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F38() argument
22642 if (!haveF2orF3(pfx) && !haveVEX(pfx) in dis_ESC_0F38()
22650 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38()
22657 putIRegG(sz, pfx, modrm, mkexpr(dst)); in dis_ESC_0F38()
22658 DIP("movbe %s,%s\n", dis_buf, nameIRegG(sz, pfx, modrm)); in dis_ESC_0F38()
22660 assign(src, getIRegG(sz, pfx, modrm)); in dis_ESC_0F38()
22663 DIP("movbe %s,%s\n", nameIRegG(sz, pfx, modrm), dis_buf); in dis_ESC_0F38()
22681 delta = dis_ESC_0F38__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F38()
22691 delta = dis_ESC_0F38__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F38()
22709 if (have66noF2noF3(pfx) && (archinfo->hwcaps & VEX_HWCAPS_AMD64_AVX)) { in dis_ESC_0F38()
22713 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagCarryX, True, in dis_ESC_0F38()
22717 if (haveF3no66noF2(pfx) && (archinfo->hwcaps & VEX_HWCAPS_AMD64_AVX)) { in dis_ESC_0F38()
22718 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagOverX, True, in dis_ESC_0F38()
22750 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F3A() argument
22768 delta = dis_ESC_0F3A__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F3A()
22778 delta = dis_ESC_0F3A__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F3A()
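
The VEX/AVX helpers that follow repeatedly call getVexNvvvv(pfx) to find the extra, non-destructive source register. In the raw instruction bytes that register comes from the VEX prefix's vvvv field, which is stored in one's-complement form. A standalone sketch that decodes a raw 2-byte (C5 xx) VEX payload byte directly; it does not use this file's internal Prefix representation.

    /* Sketch of where the extra AVX source register number comes from:
       in a 2-byte VEX prefix, bits 6:3 of the payload byte hold vvvv,
       one's-complemented. */
    #include <stdio.h>

    static unsigned vex2_vvvv(unsigned char payload) {
       return (~(payload >> 3)) & 0xF;    /* shift vvvv down, invert, keep 4 bits */
    }

    int main(void) {
       unsigned char payload = 0x70;      /* bits 6:3 = 0b1110 -> vvvv = 0b0001 */
       printf("second source is xmm/ymm register %u\n", vex2_vvvv(payload));
       return 0;
    }
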
22797 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG() argument
22806 UInt rD = gregOfRexRM(pfx, modrm); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22807 UInt rSL = getVexNvvvv(pfx); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22813 vassert(0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*WIG?*/); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22819 UInt rSR = eregOfRexRM(pfx, modrm); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22825 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22865 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_simple() argument
22870 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_simple()
22880 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_complex() argument
22885 uses_vvvv, vbi, pfx, delta, name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_complex()
22893 Prefix pfx, Long delta, in dis_AVX128_shiftV_byE() argument
22901 UInt rG = gregOfRexRM(pfx,modrm); in dis_AVX128_shiftV_byE()
22902 UInt rV = getVexNvvvv(pfx); in dis_AVX128_shiftV_byE()
22908 UInt rE = eregOfRexRM(pfx,modrm); in dis_AVX128_shiftV_byE()
22914 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_shiftV_byE()
22967 Prefix pfx, Long delta, in dis_AVX256_shiftV_byE() argument
22975 UInt rG = gregOfRexRM(pfx,modrm); in dis_AVX256_shiftV_byE()
22976 UInt rV = getVexNvvvv(pfx); in dis_AVX256_shiftV_byE()
22982 UInt rE = eregOfRexRM(pfx,modrm); in dis_AVX256_shiftV_byE()
22988 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX256_shiftV_byE()
23043 Prefix pfx, Long delta, in dis_AVX_var_shiftV_byE() argument
23050 UInt rG = gregOfRexRM(pfx,modrm); in dis_AVX_var_shiftV_byE()
23051 UInt rV = getVexNvvvv(pfx); in dis_AVX_var_shiftV_byE()
23056 UInt rE = eregOfRexRM(pfx,modrm); in dis_AVX_var_shiftV_byE()
23067 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX_var_shiftV_byE()
23153 Long dis_AVX128_shiftE_to_V_imm( Prefix pfx, in dis_AVX128_shiftE_to_V_imm() argument
23160 UInt rD = getVexNvvvv(pfx); in dis_AVX128_shiftE_to_V_imm()
23169 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_AVX128_shiftE_to_V_imm()
23171 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) ); in dis_AVX128_shiftE_to_V_imm()
23210 Long dis_AVX256_shiftE_to_V_imm( Prefix pfx, in dis_AVX256_shiftE_to_V_imm() argument
23217 UInt rD = getVexNvvvv(pfx); in dis_AVX256_shiftE_to_V_imm()
23226 nameYMMReg(eregOfRexRM(pfx,rm)), in dis_AVX256_shiftE_to_V_imm()
23228 assign( e0, getYMMReg(eregOfRexRM(pfx,rm)) ); in dis_AVX256_shiftE_to_V_imm()
23274 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo64() argument
23281 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64()
23282 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo64()
23285 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64()
23294 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo64()
23317 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo64_unary() argument
23324 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64_unary()
23325 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo64_unary()
23330 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64_unary()
23336 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo64_unary()
23364 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo32_unary() argument
23371 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32_unary()
23372 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo32_unary()
23377 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32_unary()
23383 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo32_unary()
23411 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo32() argument
23418 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32()
23419 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo32()
23422 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32()
23431 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo32()
23451 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G() argument
23455 uses_vvvv, vbi, pfx, delta, opname, op, in dis_AVX128_E_V_to_G()
23467 Prefix pfx, Long delta, in dis_AVX128_cmp_V_E_to_G() argument
23481 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX128_cmp_V_E_to_G()
23482 UInt rV = getVexNvvvv(pfx); in dis_AVX128_cmp_V_E_to_G()
23491 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_cmp_V_E_to_G()
23498 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_AVX128_cmp_V_E_to_G()
23577 Prefix pfx, Long delta, in dis_AVX256_cmp_V_E_to_G() argument
23591 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX256_cmp_V_E_to_G()
23592 UInt rV = getVexNvvvv(pfx); in dis_AVX256_cmp_V_E_to_G()
23606 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX256_cmp_V_E_to_G()
23613 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_AVX256_cmp_V_E_to_G()
23647 Prefix pfx, Long delta, in dis_AVX128_E_to_G_unary() argument
23657 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX128_E_to_G_unary()
23659 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_to_G_unary()
23664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_to_G_unary()
23680 Prefix pfx, Long delta, in dis_AVX128_E_to_G_unary_all() argument
23688 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX128_E_to_G_unary_all()
23690 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_to_G_unary_all()
23695 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_to_G_unary_all()
23716 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG() argument
23725 UInt rD = gregOfRexRM(pfx, modrm); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23726 UInt rSL = getVexNvvvv(pfx); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23732 vassert(1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*WIG?*/); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23738 UInt rSR = eregOfRexRM(pfx, modrm); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23744 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23783 Prefix pfx, Long delta, in dis_AVX256_E_V_to_G() argument
23787 uses_vvvv, vbi, pfx, delta, opname, op, in dis_AVX256_E_V_to_G()
23799 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_simple() argument
23804 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_simple()
23814 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_complex() argument
23819 uses_vvvv, vbi, pfx, delta, name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_complex()
23828 Prefix pfx, Long delta, in dis_AVX256_E_to_G_unary() argument
23838 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX256_E_to_G_unary()
23840 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX256_E_to_G_unary()
23845 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX256_E_to_G_unary()
23861 Prefix pfx, Long delta, in dis_AVX256_E_to_G_unary_all() argument
23869 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX256_E_to_G_unary_all()
23871 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX256_E_to_G_unary_all()
23876 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX256_E_to_G_unary_all()
23889 static Long dis_CVTDQ2PD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PD_256() argument
23897 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_256()
23899 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_256()
23904 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PD_256()
23925 static Long dis_CVTPD2PS_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPD2PS_256() argument
23932 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPD2PS_256()
23936 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPD2PS_256()
23941 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPD2PS_256()
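
Several cases in dis_ESC_0F__VEX below (the vcvtsd2si/vcvtss2si family) are gated on getRexW, i.e. on VEX.W: the same opcode writes either a 32-bit or a 64-bit integer destination. A trivial standalone sketch of that width selection (conversion and rounding semantics are out of scope here):

    /* Sketch of VEX.W gating: the W bit in the prefix state picks a
       32-bit or 64-bit integer destination for the same opcode
       (cf. the "1==getRexW(pfx) ? 8 : 4" idiom above). */
    #include <stdio.h>

    static int dest_bits(int rexW) {
       return rexW ? 64 : 32;
    }

    int main(void) {
       printf("vcvtsd2si, W0: %d-bit destination (e.g. eax)\n", dest_bits(0));
       printf("vcvtsd2si, W1: %d-bit destination (e.g. rax)\n", dest_bits(1));
       return 0;
    }
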
24059 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F__VEX() argument
24076 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24078 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24079 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24092 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24094 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24095 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24096 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24111 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24113 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24114 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24127 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24129 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24130 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24131 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24146 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24148 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24150 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24155 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24163 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24165 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24167 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24172 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24180 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24182 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24184 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24189 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24197 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24199 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24201 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24206 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24218 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24220 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24221 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24230 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24232 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24233 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24234 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24248 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24250 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24251 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24260 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24262 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24263 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24264 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24279 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24281 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24283 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24288 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24296 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24298 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24300 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24305 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24313 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24315 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24317 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24322 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24330 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24332 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24334 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24339 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24350 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24351 delta = dis_MOVDDUP_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24355 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24356 delta = dis_MOVDDUP_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24361 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
24364 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24365 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24366 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24382 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24383 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24385 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24386 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24387 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24400 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24401 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
24406 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24407 delta = dis_MOVSxDUP_256( vbi, pfx, delta, True/*isL*/ ); in dis_ESC_0F__VEX()
24417 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24418 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24420 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24421 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24433 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24436 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24437 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24442 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24448 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24461 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24464 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24465 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24470 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24476 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24489 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24492 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24493 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24498 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24504 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24517 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24520 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24521 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24526 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24532 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24548 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
24551 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24552 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24553 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24569 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24570 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24572 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24573 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24574 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24576 DIP("vmovhp%c %s,%s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
24587 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24588 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
24593 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24594 delta = dis_MOVSxDUP_256( vbi, pfx, delta, False/*!isL*/ ); in dis_ESC_0F__VEX()
24604 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24605 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24607 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24608 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24611 DIP("vmovhp%c %s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
24619 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24621 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24623 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24628 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24637 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24639 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24641 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24646 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24655 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24657 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24659 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24673 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24675 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24677 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24682 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24694 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24696 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24698 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24703 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24712 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24714 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24716 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24721 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24730 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24732 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24734 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24740 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24749 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24751 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24753 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24759 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24773 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24775 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24776 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24779 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24785 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24799 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24801 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24802 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24805 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24811 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24827 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24829 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24830 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24833 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24839 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24857 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24859 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24860 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24863 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24869 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24891 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24892 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24894 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24897 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24901 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
24907 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24908 && 1==getVexL(pfx)/*256*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24910 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24913 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24917 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
24925 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24926 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24930 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24931 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24935 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24936 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24940 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24941 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24948 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24949 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24953 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24954 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24958 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24959 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24963 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24964 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
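The eight cases above select vcvtsd2si, vcvtss2si and their truncating vcvtt* counterparts, with VEX.W picking a 4- or 8-byte integer destination. A minimal stand-alone sketch of the rounding-versus-truncation distinction being decoded (architectural behaviour only, assuming the C default round-to-nearest mode; not the decoder's own IR construction):

#include <math.h>
#include <stdio.h>

int main(void)
{
   double x = 2.7;
   long long rounded   = llrint(x);      /* CVTSD2SI-style: 3 under round-to-nearest */
   long long truncated = (long long)x;   /* CVTTSD2SI-style: 2, always truncates     */
   printf("%lld %lld\n", rounded, truncated);
   return 0;
}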
24973 if (have66noF2noF3(pfx)) { in dis_ESC_0F__VEX()
24974 delta = dis_COMISD( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F__VEX()
24979 if (haveNo66noF2noF3(pfx)) { in dis_ESC_0F__VEX()
24980 delta = dis_COMISS( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F__VEX()
24987 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24988 delta = dis_MOVMSKPD_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24992 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24993 delta = dis_MOVMSKPD_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24997 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24998 delta = dis_MOVMSKPS_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
25002 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25003 delta = dis_MOVMSKPS_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
25010 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25012 uses_vvvv, vbi, pfx, delta, "vsqrtss", Iop_Sqrt32F0x4 ); in dis_ESC_0F__VEX()
25016 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25018 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx4 ); in dis_ESC_0F__VEX()
25022 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25024 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx8 ); in dis_ESC_0F__VEX()
25028 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25030 uses_vvvv, vbi, pfx, delta, "vsqrtsd", Iop_Sqrt64F0x2 ); in dis_ESC_0F__VEX()
25034 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25036 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx2 ); in dis_ESC_0F__VEX()
25040 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25042 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx4 ); in dis_ESC_0F__VEX()
25049 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25051 uses_vvvv, vbi, pfx, delta, "vrsqrtss", in dis_ESC_0F__VEX()
25056 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25058 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx4 ); in dis_ESC_0F__VEX()
25062 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25064 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx8 ); in dis_ESC_0F__VEX()
25071 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25073 uses_vvvv, vbi, pfx, delta, "vrcpss", Iop_RecipEst32F0x4 ); in dis_ESC_0F__VEX()
25077 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25079 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx4 ); in dis_ESC_0F__VEX()
25083 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25085 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx8 ); in dis_ESC_0F__VEX()
25093 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25095 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128 ); in dis_ESC_0F__VEX()
25100 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25102 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256 ); in dis_ESC_0F__VEX()
25106 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25108 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128 ); in dis_ESC_0F__VEX()
25112 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25114 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256 ); in dis_ESC_0F__VEX()
25122 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25124 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128, in dis_ESC_0F__VEX()
25129 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25131 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256, in dis_ESC_0F__VEX()
25136 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25138 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128, in dis_ESC_0F__VEX()
25143 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25145 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256, in dis_ESC_0F__VEX()
25154 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25156 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV128 ); in dis_ESC_0F__VEX()
25161 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25163 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV256 ); in dis_ESC_0F__VEX()
25168 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25170 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV128 ); in dis_ESC_0F__VEX()
25175 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25177 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV256 ); in dis_ESC_0F__VEX()
25185 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25187 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV128 ); in dis_ESC_0F__VEX()
25192 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25194 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV256 ); in dis_ESC_0F__VEX()
25199 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25201 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV128 ); in dis_ESC_0F__VEX()
25206 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25208 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV256 ); in dis_ESC_0F__VEX()
25215 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25217 uses_vvvv, vbi, pfx, delta, "vaddsd", Iop_Add64F0x2 ); in dis_ESC_0F__VEX()
25221 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25223 uses_vvvv, vbi, pfx, delta, "vaddss", Iop_Add32F0x4 ); in dis_ESC_0F__VEX()
25227 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25229 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx4 ); in dis_ESC_0F__VEX()
25233 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25235 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx8 ); in dis_ESC_0F__VEX()
25239 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25241 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx2 ); in dis_ESC_0F__VEX()
25245 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25247 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx4 ); in dis_ESC_0F__VEX()
25254 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25256 uses_vvvv, vbi, pfx, delta, "vmulsd", Iop_Mul64F0x2 ); in dis_ESC_0F__VEX()
25260 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25262 uses_vvvv, vbi, pfx, delta, "vmulss", Iop_Mul32F0x4 ); in dis_ESC_0F__VEX()
25266 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25268 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx4 ); in dis_ESC_0F__VEX()
25272 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25274 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx8 ); in dis_ESC_0F__VEX()
25278 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25280 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx2 ); in dis_ESC_0F__VEX()
25284 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25286 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx4 ); in dis_ESC_0F__VEX()
25293 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25294 delta = dis_CVTPS2PD_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
25298 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25299 delta = dis_CVTPS2PD_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
25303 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25304 delta = dis_CVTPD2PS_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
25308 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25309 delta = dis_CVTPD2PS_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
25313 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25315 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25316 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25321 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25327 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25343 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25345 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25346 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25349 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25355 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25372 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25373 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
25378 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25379 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta, in dis_ESC_0F__VEX()
25384 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25385 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
25390 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25391 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta, in dis_ESC_0F__VEX()
25396 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25397 delta = dis_CVTDQ2PS_128 ( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
25401 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25402 delta = dis_CVTDQ2PS_256 ( vbi, pfx, delta ); in dis_ESC_0F__VEX()
25409 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25411 uses_vvvv, vbi, pfx, delta, "vsubsd", Iop_Sub64F0x2 ); in dis_ESC_0F__VEX()
25415 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25417 uses_vvvv, vbi, pfx, delta, "vsubss", Iop_Sub32F0x4 ); in dis_ESC_0F__VEX()
25421 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25423 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx4 ); in dis_ESC_0F__VEX()
25427 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25429 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx8 ); in dis_ESC_0F__VEX()
25433 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25435 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx2 ); in dis_ESC_0F__VEX()
25439 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25441 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx4 ); in dis_ESC_0F__VEX()
25448 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25450 uses_vvvv, vbi, pfx, delta, "vminsd", Iop_Min64F0x2 ); in dis_ESC_0F__VEX()
25454 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25456 uses_vvvv, vbi, pfx, delta, "vminss", Iop_Min32F0x4 ); in dis_ESC_0F__VEX()
25460 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25462 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx4 ); in dis_ESC_0F__VEX()
25466 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25468 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx8 ); in dis_ESC_0F__VEX()
25472 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25474 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx2 ); in dis_ESC_0F__VEX()
25478 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25480 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx4 ); in dis_ESC_0F__VEX()
25487 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25489 uses_vvvv, vbi, pfx, delta, "vdivsd", Iop_Div64F0x2 ); in dis_ESC_0F__VEX()
25493 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25495 uses_vvvv, vbi, pfx, delta, "vdivss", Iop_Div32F0x4 ); in dis_ESC_0F__VEX()
25499 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25501 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx4 ); in dis_ESC_0F__VEX()
25505 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25507 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx8 ); in dis_ESC_0F__VEX()
25511 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25513 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx2 ); in dis_ESC_0F__VEX()
25517 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25519 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx4 ); in dis_ESC_0F__VEX()
25526 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25528 uses_vvvv, vbi, pfx, delta, "vmaxsd", Iop_Max64F0x2 ); in dis_ESC_0F__VEX()
25532 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25534 uses_vvvv, vbi, pfx, delta, "vmaxss", Iop_Max32F0x4 ); in dis_ESC_0F__VEX()
25538 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25540 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx4 ); in dis_ESC_0F__VEX()
25544 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25546 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx8 ); in dis_ESC_0F__VEX()
25550 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25552 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx2 ); in dis_ESC_0F__VEX()
25556 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25558 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx4 ); in dis_ESC_0F__VEX()
25566 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25568 uses_vvvv, vbi, pfx, delta, "vpunpcklbw", in dis_ESC_0F__VEX()
25575 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25577 uses_vvvv, vbi, pfx, delta, "vpunpcklbw", in dis_ESC_0F__VEX()
25586 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25588 uses_vvvv, vbi, pfx, delta, "vpunpcklwd", in dis_ESC_0F__VEX()
25595 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25597 uses_vvvv, vbi, pfx, delta, "vpunpcklwd", in dis_ESC_0F__VEX()
25606 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25608 uses_vvvv, vbi, pfx, delta, "vpunpckldq", in dis_ESC_0F__VEX()
25615 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25617 uses_vvvv, vbi, pfx, delta, "vpunpckldq", in dis_ESC_0F__VEX()
25626 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25628 uses_vvvv, vbi, pfx, delta, "vpacksswb", in dis_ESC_0F__VEX()
25635 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25637 uses_vvvv, vbi, pfx, delta, "vpacksswb", in dis_ESC_0F__VEX()
25646 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25648 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx16 ); in dis_ESC_0F__VEX()
25653 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25655 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx32 ); in dis_ESC_0F__VEX()
25663 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25665 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx8 ); in dis_ESC_0F__VEX()
25670 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25672 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx16 ); in dis_ESC_0F__VEX()
25680 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25682 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx4 ); in dis_ESC_0F__VEX()
25687 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25689 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx8 ); in dis_ESC_0F__VEX()
25697 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25699 uses_vvvv, vbi, pfx, delta, "vpackuswb", in dis_ESC_0F__VEX()
25706 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25708 uses_vvvv, vbi, pfx, delta, "vpackuswb", in dis_ESC_0F__VEX()
25717 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25719 uses_vvvv, vbi, pfx, delta, "vpunpckhbw", in dis_ESC_0F__VEX()
25726 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25728 uses_vvvv, vbi, pfx, delta, "vpunpckhbw", in dis_ESC_0F__VEX()
25737 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25739 uses_vvvv, vbi, pfx, delta, "vpunpckhwd", in dis_ESC_0F__VEX()
25746 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25748 uses_vvvv, vbi, pfx, delta, "vpunpckhwd", in dis_ESC_0F__VEX()
25757 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25759 uses_vvvv, vbi, pfx, delta, "vpunpckhdq", in dis_ESC_0F__VEX()
25766 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25768 uses_vvvv, vbi, pfx, delta, "vpunpckhdq", in dis_ESC_0F__VEX()
25777 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25779 uses_vvvv, vbi, pfx, delta, "vpackssdw", in dis_ESC_0F__VEX()
25786 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25788 uses_vvvv, vbi, pfx, delta, "vpackssdw", in dis_ESC_0F__VEX()
25797 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25799 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq", in dis_ESC_0F__VEX()
25806 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25808 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq", in dis_ESC_0F__VEX()
25817 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25819 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq", in dis_ESC_0F__VEX()
25826 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25828 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq", in dis_ESC_0F__VEX()
25836 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25837 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
25843 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25844 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
25846 DIP("vmovd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__VEX()
25847 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25849 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25852 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25856 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25861 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25862 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
25868 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25869 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
25871 DIP("vmovq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__VEX()
25872 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25874 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25877 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25881 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25890 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
25891 && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25893 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25895 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
25898 UInt rS = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25903 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25915 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
25916 && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25918 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25920 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
25923 UInt rS = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25928 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25942 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25943 delta = dis_PSHUFD_32x4( vbi, pfx, delta, True/*writesYmm*/); in dis_ESC_0F__VEX()
25947 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25948 delta = dis_PSHUFD_32x8( vbi, pfx, delta); in dis_ESC_0F__VEX()
25952 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25953 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
25958 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25959 delta = dis_PSHUFxW_256( vbi, pfx, delta, False/*!xIsH*/ ); in dis_ESC_0F__VEX()
25963 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25964 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
25969 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25970 delta = dis_PSHUFxW_256( vbi, pfx, delta, True/*xIsH*/ ); in dis_ESC_0F__VEX()
25979 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25980 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
25983 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25989 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25995 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26005 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
26006 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F__VEX()
26009 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26015 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26021 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26034 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
26035 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
26038 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26044 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26050 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26060 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
26061 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F__VEX()
26064 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26070 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26076 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26090 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
26092 Int rS = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__VEX()
26093 Int rD = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26114 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26120 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26131 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F__VEX()
26133 Int rS = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__VEX()
26134 Int rD = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26162 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26168 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
26180 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26182 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x16 ); in dis_ESC_0F__VEX()
26187 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26189 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x32 ); in dis_ESC_0F__VEX()
26197 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26199 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x8 ); in dis_ESC_0F__VEX()
26204 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26206 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x16 ); in dis_ESC_0F__VEX()
26214 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26216 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x4 ); in dis_ESC_0F__VEX()
26221 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26223 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x8 ); in dis_ESC_0F__VEX()
26230 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26241 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26257 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26263 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26264 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26266 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26272 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26285 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26292 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26293 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26296 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26302 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26319 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26325 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26326 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26328 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26334 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26347 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26354 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26355 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26358 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26364 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26387 if (haveF3no66noF2(pfx) in dis_ESC_0F__VEX()
26388 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
26391 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26393 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26398 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26411 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
26412 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
26414 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26416 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26421 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26430 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
26431 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
26433 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26435 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26440 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26452 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
26453 && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26455 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26457 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
26461 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26466 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26477 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
26478 && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26480 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26482 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
26486 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26491 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26504 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__VEX()
26505 && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F__VEX()
26506 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */ in dis_ESC_0F__VEX()
26509 delta = dis_STMXCSR(vbi, pfx, delta, True/*isAvx*/); in dis_ESC_0F__VEX()
26513 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__VEX()
26514 && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F__VEX()
26515 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */ in dis_ESC_0F__VEX()
26518 delta = dis_LDMXCSR(vbi, pfx, delta, True/*isAvx*/); in dis_ESC_0F__VEX()
26526 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
26528 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26536 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
26538 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26546 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26548 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26556 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26558 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26565 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26567 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26575 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26577 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
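The vcmpsd/vcmpss/vcmppd/vcmpps cases above all route through dis_AVX128_cmp_V_E_to_G and dis_AVX256_cmp_V_E_to_G together with an imm8 that names the comparison predicate. A hedged stand-alone model of the lane-wise result format, where each destination lane becomes all ones or all zeros; only the EQ predicate (imm8 0) is modelled, and cmp_eq_lane is an illustrative name, not a decoder helper:

#include <stdint.h>
#include <stdio.h>

/* One lane of a CMPPS-style compare: all ones if the predicate holds,
   all zeros otherwise. */
static uint32_t cmp_eq_lane ( float a, float b )
{
   return (a == b) ? 0xFFFFFFFFu : 0u;
}

int main(void)
{
   float    a[4] = {1.0f, 2.0f, 3.0f, 4.0f};
   float    b[4] = {1.0f, 9.0f, 3.0f, 8.0f};
   uint32_t m[4];
   for (int i = 0; i < 4; i++)
      m[i] = cmp_eq_lane(a[i], b[i]);
   printf("%08x %08x %08x %08x\n", m[0], m[1], m[2], m[3]);
   /* prints: ffffffff 00000000 ffffffff 00000000 */
   return 0;
}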
26586 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26588 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26589 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26596 getIReg32(eregOfRexRM(pfx,modrm))) ); in dis_ESC_0F__VEX()
26599 nameIReg32( eregOfRexRM(pfx, modrm) ), nameXMMReg(rG) ); in dis_ESC_0F__VEX()
26601 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26620 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
26621 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
26623 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta, in dis_ESC_0F__VEX()
26633 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26638 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26639 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26642 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26649 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26663 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26668 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26669 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26672 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26679 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26693 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26698 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26699 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26702 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26709 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26723 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26728 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26729 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26732 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26739 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26755 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26757 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26762 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26764 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26769 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26771 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26776 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26778 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26786 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26787 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26794 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26795 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26805 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26806 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26812 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26813 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26822 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26823 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26829 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26830 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26840 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26842 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x2 ); in dis_ESC_0F__VEX()
26847 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26849 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x4 ); in dis_ESC_0F__VEX()
26856 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26858 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x8 ); in dis_ESC_0F__VEX()
26862 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26864 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x16 ); in dis_ESC_0F__VEX()
26874 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
26875 && 0==getRexW(pfx)/*this might be redundant, dunno*/) { in dis_ESC_0F__VEX()
26877 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26882 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26893 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26894 delta = dis_PMOVMSKB_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
26898 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26899 delta = dis_PMOVMSKB_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
26906 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26908 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux16 ); in dis_ESC_0F__VEX()
26912 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26914 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux32 ); in dis_ESC_0F__VEX()
26921 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26923 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux8 ); in dis_ESC_0F__VEX()
26927 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26929 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux16 ); in dis_ESC_0F__VEX()
26936 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26938 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux16 ); in dis_ESC_0F__VEX()
26942 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26944 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux32 ); in dis_ESC_0F__VEX()
26952 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26954 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV128 ); in dis_ESC_0F__VEX()
26959 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26961 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV256 ); in dis_ESC_0F__VEX()
26968 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26970 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux16 ); in dis_ESC_0F__VEX()
26974 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26976 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux32 ); in dis_ESC_0F__VEX()
26983 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26985 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux8 ); in dis_ESC_0F__VEX()
26989 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26991 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux16 ); in dis_ESC_0F__VEX()
26998 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27000 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux16 ); in dis_ESC_0F__VEX()
27004 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27006 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux32 ); in dis_ESC_0F__VEX()
27014 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27016 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV128, in dis_ESC_0F__VEX()
27022 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27024 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV256, in dis_ESC_0F__VEX()
27032 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27034 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux16 ); in dis_ESC_0F__VEX()
27038 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27040 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux32 ); in dis_ESC_0F__VEX()
27047 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27048 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27054 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27055 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27064 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27065 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27071 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27072 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27081 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27083 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux8 ); in dis_ESC_0F__VEX()
27087 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27089 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux16 ); in dis_ESC_0F__VEX()
27096 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27098 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux8 ); in dis_ESC_0F__VEX()
27102 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27104 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux16 ); in dis_ESC_0F__VEX()
27111 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27113 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx8 ); in dis_ESC_0F__VEX()
27117 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27119 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx16 ); in dis_ESC_0F__VEX()
27126 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27127 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, True/*isAvx*/); in dis_ESC_0F__VEX()
27131 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27132 delta = dis_CVTDQ2PD_256(vbi, pfx, delta); in dis_ESC_0F__VEX()
27136 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27137 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
27142 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27143 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, True/*r2zero*/); in dis_ESC_0F__VEX()
27147 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27148 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
27153 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27154 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, False/*!r2zero*/); in dis_ESC_0F__VEX()
27161 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27163 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
27165 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
27175 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27177 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
27179 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
27192 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27194 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx16 ); in dis_ESC_0F__VEX()
27198 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27200 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx32 ); in dis_ESC_0F__VEX()
27207 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27209 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx8 ); in dis_ESC_0F__VEX()
27213 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27215 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx16 ); in dis_ESC_0F__VEX()
27223 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27225 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx8 ); in dis_ESC_0F__VEX()
27230 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27232 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx16 ); in dis_ESC_0F__VEX()
27240 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27242 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV128 ); in dis_ESC_0F__VEX()
27247 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27249 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV256 ); in dis_ESC_0F__VEX()
27256 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27258 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx16 ); in dis_ESC_0F__VEX()
27262 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27264 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx32 ); in dis_ESC_0F__VEX()
27271 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27273 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx8 ); in dis_ESC_0F__VEX()
27277 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27279 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx16 ); in dis_ESC_0F__VEX()
27287 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27289 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx8 ); in dis_ESC_0F__VEX()
27294 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27296 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx16 ); in dis_ESC_0F__VEX()
27304 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27306 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV128 ); in dis_ESC_0F__VEX()
27311 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27313 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV256 ); in dis_ESC_0F__VEX()
27320 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27322 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
27325 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
27333 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27335 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
27338 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
27349 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27350 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27357 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27358 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27368 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27369 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27375 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27376 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27385 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27386 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27392 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27393 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
27402 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27404 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
27409 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27411 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
27419 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27421 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
27426 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27428 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
27436 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27438 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
27443 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27445 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
27453 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
27455 delta = dis_MASKMOVDQU( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
27463 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27465 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x16 ); in dis_ESC_0F__VEX()
27470 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27472 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x32 ); in dis_ESC_0F__VEX()
27480 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27482 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x8 ); in dis_ESC_0F__VEX()
27487 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27489 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x16 ); in dis_ESC_0F__VEX()
27497 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27499 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x4 ); in dis_ESC_0F__VEX()
27504 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27506 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x8 ); in dis_ESC_0F__VEX()
27514 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27516 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x2 ); in dis_ESC_0F__VEX()
27521 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27523 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x4 ); in dis_ESC_0F__VEX()
27531 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27533 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x16 ); in dis_ESC_0F__VEX()
27538 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27540 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x32 ); in dis_ESC_0F__VEX()
27548 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27550 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x8 ); in dis_ESC_0F__VEX()
27555 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27557 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x16 ); in dis_ESC_0F__VEX()
27565 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
27567 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x4 ); in dis_ESC_0F__VEX()
27572 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
27574 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x8 ); in dis_ESC_0F__VEX()
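The dis_ESC_0F__VEX matches end here; every case above follows the same gate, namely an exact SIMD-prefix combination (66, F2, F3 or none) plus a specific VEX.L value, with the 128-bit and 256-bit forms handled as separate cases. A self-contained sketch of that gating pattern follows; the SkPrefix type, SK_* bits and sk_* helpers are hypothetical stand-ins for the decoder's own Prefix word and predicates, not its real definitions:

#include <stdio.h>

typedef unsigned int SkPrefix;     /* simplified stand-in for the real Prefix */
#define SK_66   (1u << 0)          /* 0x66 operand-size prefix seen           */
#define SK_F2   (1u << 1)          /* F2 prefix seen                          */
#define SK_F3   (1u << 2)          /* F3 prefix seen                          */
#define SK_VEXL (1u << 3)          /* VEX.L bit                               */

static int sk_have66noF2noF3 ( SkPrefix p ) {
   return (p & (SK_66|SK_F2|SK_F3)) == SK_66;
}
static int sk_getVexL ( SkPrefix p ) {
   return (p & SK_VEXL) ? 1 : 0;
}

int main(void)
{
   SkPrefix p = SK_66 | SK_VEXL;   /* e.g. a VEX.256.66.0F-encoded opcode */
   if (sk_have66noF2noF3(p) && 1 == sk_getVexL(p))
      printf("decode the 256-bit (ymm) form\n");
   else if (sk_have66noF2noF3(p) && 0 == sk_getVexL(p))
      printf("decode the 128-bit (xmm) form\n");
   else
      printf("prefix combination not handled: decode failure\n");
   return 0;
}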
27673 const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_SHIFTX() argument
27678 Int size = getRexW(pfx) ? 8 : 4; in dis_SHIFTX()
27684 assign( amt, getIRegV(size,pfx) ); in dis_SHIFTX()
27686 assign( src, getIRegE(size,pfx,rm) ); in dis_SHIFTX()
27687 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx), in dis_SHIFTX()
27688 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm)); in dis_SHIFTX()
27691 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SHIFTX()
27693 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx), dis_buf, in dis_SHIFTX()
27694 nameIRegG(size,pfx,rm)); in dis_SHIFTX()
27698 putIRegG( size, pfx, rm, in dis_SHIFTX()
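dis_SHIFTX, matched above, derives the operand size from VEX.W (8 bytes when W=1, otherwise 4) and takes the shift amount from the register named by VEX.vvvv. For reference, the BMI2 shifts it serves (SHLX/SHRX/SARX) mask the count to the operand width and leave the flags untouched; a small illustrative model of that masking, where shlx64/shlx32 are made-up names rather than decoder helpers:

#include <stdint.h>
#include <stdio.h>

/* SHLX-style count handling: the count is reduced mod 64 for 64-bit
   operands and mod 32 for 32-bit operands; no flags are written. */
static uint64_t shlx64 ( uint64_t src, uint64_t amt ) { return src << (amt & 63); }
static uint32_t shlx32 ( uint32_t src, uint32_t amt ) { return src << (amt & 31); }

int main(void)
{
   printf("%llu\n", (unsigned long long)shlx64(1, 65));   /* count 65 -> 1, prints 2 */
   printf("%u\n",   shlx32(1, 33));                       /* count 33 -> 1, prints 2 */
   return 0;
}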
27708 static Long dis_FMA ( const VexAbiInfo* vbi, Prefix pfx, Long delta, UChar opc ) in dis_FMA() argument
27711 UInt rG = gregOfRexRM(pfx, modrm); in dis_FMA()
27712 UInt rV = getVexNvvvv(pfx); in dis_FMA()
27714 IRType ty = getRexW(pfx) ? Ity_F64 : Ity_F32; in dis_FMA()
27715 IRType vty = scalar ? ty : (getVexL(pfx) ? Ity_V256 : Ity_V128); in dis_FMA()
27774 UInt rE = eregOfRexRM(pfx, modrm); in dis_FMA()
27789 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_FMA()
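In dis_FMA, getRexW(pfx) picks the element type (F64 when VEX.W=1, F32 when W=0) and getVexL(pfx) the vector width for the non-scalar forms; the 132/213/231 suffix of the opcode fixes which two operands are multiplied and which is the addend. An illustrative stand-alone model of those three operand orders, with operand 1 the destination, operand 2 VEX.vvvv and operand 3 r/m; the fmadd* helper names are made up:

#include <math.h>
#include <stdio.h>

/* Single-rounding fused multiply-add in the three FMA operand orders. */
static double fmadd132 ( double op1, double op2, double op3 ) { return fma(op1, op3, op2); }
static double fmadd213 ( double op1, double op2, double op3 ) { return fma(op2, op1, op3); }
static double fmadd231 ( double op1, double op2, double op3 ) { return fma(op2, op3, op1); }

int main(void)
{
   double a = 2.0, b = 3.0, c = 5.0;
   printf("132: %g  213: %g  231: %g\n",
          fmadd132(a, b, c),    /* a*c + b = 13 */
          fmadd213(a, b, c),    /* b*a + c = 11 */
          fmadd231(a, b, c));   /* b*c + a = 17 */
   return 0;
}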
27857 Prefix pfx, Long delta, in dis_VMASKMOV() argument
27865 UInt rG = gregOfRexRM(pfx,modrm); in dis_VMASKMOV()
27866 UInt rV = getVexNvvvv(pfx); in dis_VMASKMOV()
27868 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_VMASKMOV()
27927 Prefix pfx, Long delta, in dis_VGATHER() argument
27935 UInt rG = gregOfRexRM(pfx,modrm); in dis_VGATHER()
27936 UInt rV = getVexNvvvv(pfx); in dis_VGATHER()
27941 addr = disAVSIBMode ( &alen, vbi, pfx, delta, dis_buf, &rI, in dis_VGATHER()
27993 addr_expr = handleAddrOverrides(vbi, pfx, addr_expr); in dis_VGATHER()
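dis_VGATHER, matched above, handles the VSIB-addressed gathers: each element address is base + index[i]*scale (plus displacement), an element is loaded only while the sign bit of its mask lane is set, and that mask lane is cleared as the element completes. A scalar sketch of the per-lane behaviour for a 32-bit VPGATHERDD-style gather; gather_dd and the plain C array standing in for guest memory are illustrative, not the decoder's IR:

#include <stdint.h>
#include <stdio.h>

static void gather_dd ( uint32_t dst[4], int32_t mask[4],
                        const uint32_t* base, const int32_t idx[4], int scale )
{
   for (int i = 0; i < 4; i++) {
      if (mask[i] < 0) {        /* sign bit set: lane participates        */
         dst[i] = *(const uint32_t*)((const char*)base
                                     + (int64_t)idx[i] * scale);
         mask[i] = 0;           /* lane cleared once its load is done     */
      }                         /* inactive lanes keep their old contents */
   }
}

int main(void)
{
   uint32_t mem[8]  = {10, 11, 12, 13, 14, 15, 16, 17};
   uint32_t dst[4]  = {0, 0, 0, 0};
   int32_t  mask[4] = {-1, 0, -1, 0};     /* gather only lanes 0 and 2 */
   int32_t  idx[4]  = {7, 1, 3, 2};
   gather_dd(dst, mask, mem, idx, 4);
   printf("%u %u %u %u\n", dst[0], dst[1], dst[2], dst[3]);   /* 17 0 13 0 */
   return 0;
}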
28030 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F38__VEX() argument
28046 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28048 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_XMM ); in dis_ESC_0F38__VEX()
28053 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28055 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_YMM ); in dis_ESC_0F38__VEX()
28066 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28067 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F38__VEX()
28074 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28075 delta = dis_PHADD_256( vbi, pfx, delta, opc ); in dis_ESC_0F38__VEX()
28083 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28085 uses_vvvv, vbi, pfx, delta, "vpmaddubsw", in dis_ESC_0F38__VEX()
28090 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28092 uses_vvvv, vbi, pfx, delta, "vpmaddubsw", in dis_ESC_0F38__VEX()
28104 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28105 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F38__VEX()
28112 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28113 delta = dis_PHADD_256( vbi, pfx, delta, opc ); in dis_ESC_0F38__VEX()
28125 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28133 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28134 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28146 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28152 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28175 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28184 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28185 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28197 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28203 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28233 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28239 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28240 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28245 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28251 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28272 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28278 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28279 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28284 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
28290 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28319 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28320 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28322 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28323 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28326 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28332 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28346 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28347 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28349 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28350 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28353 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28359 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28376 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28377 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28379 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28380 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28383 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28389 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28403 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28404 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28406 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28407 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
28410 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28416 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28433 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28434 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 32 ); in dis_ESC_0F38__VEX()
28438 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28439 delta = dis_xTESTy_256( vbi, pfx, delta, 32 ); in dis_ESC_0F38__VEX()
28446 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28447 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 64 ); in dis_ESC_0F38__VEX()
28451 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28452 delta = dis_xTESTy_256( vbi, pfx, delta, 64 ); in dis_ESC_0F38__VEX()
28459 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28460 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28462 uses_vvvv, vbi, pfx, delta, "vpermps", math_VPERMD ); in dis_ESC_0F38__VEX()
28469 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28470 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 0 ); in dis_ESC_0F38__VEX()
28474 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28475 delta = dis_xTESTy_256( vbi, pfx, delta, 0 ); in dis_ESC_0F38__VEX()
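The dis_xTESTy_128/256 calls above cover vtestps (width argument 32), vtestpd (64) and vptest (0). Architecturally these set ZF from src AND dst and CF from src AND NOT dst, with the vtestp* forms restricted to the sign bits of each lane. A tiny stand-alone model of the vptest case, representing 128 bits as two 64-bit halves; ptest_model is an invented name:

#include <stdint.h>
#include <stdio.h>

static void ptest_model ( const uint64_t dst[2], const uint64_t src[2],
                          int* zf, int* cf )
{
   *zf = ((dst[0] & src[0]) | (dst[1] & src[1])) == 0;     /* dst AND src       */
   *cf = ((~dst[0] & src[0]) | (~dst[1] & src[1])) == 0;   /* (NOT dst) AND src */
}

int main(void)
{
   uint64_t a[2] = {0xFF00, 0}, b[2] = {0x00FF, 0};
   int zf, cf;
   ptest_model(a, b, &zf, &cf);
   printf("ZF=%d CF=%d\n", zf, cf);    /* disjoint bit sets: ZF=1 CF=0 */
   return 0;
}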
28482 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28483 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28486 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28487 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28499 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28500 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28503 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28504 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28517 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28518 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28521 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28522 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28534 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28535 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28538 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28539 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28555 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28556 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28559 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28560 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28571 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28572 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28575 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28576 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28590 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28591 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28594 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28595 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28607 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28609 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28614 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28616 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28624 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28626 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28631 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28633 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28641 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28643 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28648 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28650 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28659 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28660 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28666 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28667 delta = dis_PMOVxXBW_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28675 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28676 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28682 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28683 delta = dis_PMOVxXBD_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28691 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28692 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28697 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28698 delta = dis_PMOVSXBQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28705 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28706 delta = dis_PMOVxXWD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28711 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28712 delta = dis_PMOVxXWD_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28719 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28720 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28724 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28725 delta = dis_PMOVSXWQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28732 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28733 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28738 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28739 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28746 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28748 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28753 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28755 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28764 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28766 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x2 ); in dis_ESC_0F38__VEX()
28771 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28773 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x4 ); in dis_ESC_0F38__VEX()
28780 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28783 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28785 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28794 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28797 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28799 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28812 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28814 uses_vvvv, vbi, pfx, delta, "vpackusdw", in dis_ESC_0F38__VEX()
28821 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28823 uses_vvvv, vbi, pfx, delta, "vpackusdw", in dis_ESC_0F38__VEX()
28831 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28832 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28834 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28839 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28840 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28842 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28850 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28851 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28853 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28858 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28859 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28861 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28869 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28870 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28872 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28877 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28878 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28880 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28888 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28889 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28891 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28896 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28897 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28899 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28908 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28909 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28915 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28916 delta = dis_PMOVxXBW_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28924 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28925 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28931 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28932 delta = dis_PMOVxXBD_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28940 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28941 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28946 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28947 delta = dis_PMOVZXBQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28955 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28956 delta = dis_PMOVxXWD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28962 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28963 delta = dis_PMOVxXWD_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28970 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28971 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28975 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28976 delta = dis_PMOVZXWQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28983 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28984 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28989 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28990 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28997 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28998 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29000 uses_vvvv, vbi, pfx, delta, "vpermd", math_VPERMD ); in dis_ESC_0F38__VEX()
29008 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29010 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx2 ); in dis_ESC_0F38__VEX()
29015 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29017 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx4 ); in dis_ESC_0F38__VEX()
29025 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29027 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx16 ); in dis_ESC_0F38__VEX()
29032 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29034 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx32 ); in dis_ESC_0F38__VEX()
29042 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29044 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx4 ); in dis_ESC_0F38__VEX()
29049 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29051 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx8 ); in dis_ESC_0F38__VEX()
29059 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29061 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux8 ); in dis_ESC_0F38__VEX()
29066 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29068 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux16 ); in dis_ESC_0F38__VEX()
29076 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29078 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux4 ); in dis_ESC_0F38__VEX()
29083 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29085 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux8 ); in dis_ESC_0F38__VEX()
29093 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29095 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx16 ); in dis_ESC_0F38__VEX()
29100 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29102 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx32 ); in dis_ESC_0F38__VEX()
29110 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29112 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx4 ); in dis_ESC_0F38__VEX()
29117 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29119 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx8 ); in dis_ESC_0F38__VEX()
29127 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29129 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux8 ); in dis_ESC_0F38__VEX()
29134 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29136 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux16 ); in dis_ESC_0F38__VEX()
29144 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29146 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux4 ); in dis_ESC_0F38__VEX()
29151 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29153 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux8 ); in dis_ESC_0F38__VEX()
29161 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29163 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x4 ); in dis_ESC_0F38__VEX()
29168 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
29170 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x8 ); in dis_ESC_0F38__VEX()
29177 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29178 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
29186 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29187 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvd", in dis_ESC_0F38__VEX()
29188 Iop_Shr32, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
29194 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F38__VEX()
29195 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvq", in dis_ESC_0F38__VEX()
29196 Iop_Shr64, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
29205 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29206 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsravd", in dis_ESC_0F38__VEX()
29207 Iop_Sar32, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
29216 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29217 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvd", in dis_ESC_0F38__VEX()
29218 Iop_Shl32, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
29224 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F38__VEX()
29225 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvq", in dis_ESC_0F38__VEX()
29226 Iop_Shl64, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
29234 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29235 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29237 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29240 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29245 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29257 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29258 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29260 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29263 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29268 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29284 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29285 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29287 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29290 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29295 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29305 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29306 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29308 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29311 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29316 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29330 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
29331 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29334 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29335 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29347 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29348 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29350 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29353 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29358 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29374 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29375 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29377 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29380 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29385 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29405 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29406 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29408 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29411 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29416 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29430 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29431 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
29433 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29436 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
29441 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29459 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29460 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29461 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
29466 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29467 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29468 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
29473 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29474 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29475 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
29480 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29481 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29482 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
29490 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29491 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29492 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
29497 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29498 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29499 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
29504 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29505 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29506 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
29511 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29512 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29513 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
29521 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29522 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29524 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd", in dis_ESC_0F38__VEX()
29530 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29531 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29533 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd", in dis_ESC_0F38__VEX()
29539 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29540 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29542 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq", in dis_ESC_0F38__VEX()
29548 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29549 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29551 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq", in dis_ESC_0F38__VEX()
29560 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29561 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29563 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd", in dis_ESC_0F38__VEX()
29569 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29570 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29572 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd", in dis_ESC_0F38__VEX()
29578 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29579 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29581 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq", in dis_ESC_0F38__VEX()
29587 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29588 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29590 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq", in dis_ESC_0F38__VEX()
29599 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29600 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29602 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps", in dis_ESC_0F38__VEX()
29608 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29609 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29611 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps", in dis_ESC_0F38__VEX()
29617 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29618 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29620 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd", in dis_ESC_0F38__VEX()
29626 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29627 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29629 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd", in dis_ESC_0F38__VEX()
29638 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29639 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29641 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps", in dis_ESC_0F38__VEX()
29647 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29648 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29650 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps", in dis_ESC_0F38__VEX()
29656 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29657 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29659 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd", in dis_ESC_0F38__VEX()
29665 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29666 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29668 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd", in dis_ESC_0F38__VEX()
29774 if (have66noF2noF3(pfx)) { in dis_ESC_0F38__VEX()
29775 delta = dis_FMA( vbi, pfx, delta, opc ); in dis_ESC_0F38__VEX()
29792 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29793 delta = dis_AESx( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F38__VEX()
29802 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29803 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29810 assign( src1, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29812 assign( src2, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29813 DIP("andn %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29814 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29817 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29819 DIP("andn %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29820 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29827 putIRegG( size, pfx, rm, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29841 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F38__VEX()
29842 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 3) { in dis_ESC_0F38__VEX()
29843 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29850 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29851 DIP("blsi %s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29852 nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29855 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29857 DIP("blsi %s,%s\n", dis_buf, nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29864 putIRegV( size, pfx, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29875 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F38__VEX()
29876 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 2) { in dis_ESC_0F38__VEX()
29877 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29884 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29885 DIP("blsmsk %s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29886 nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29889 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29891 DIP("blsmsk %s,%s\n", dis_buf, nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29898 putIRegV( size, pfx, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29909 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F38__VEX()
29910 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 1) { in dis_ESC_0F38__VEX()
29911 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29918 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29919 DIP("blsr %s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29920 nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29923 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29925 DIP("blsr %s,%s\n", dis_buf, nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29932 putIRegV( size, pfx, mkexpr(dst) ); in dis_ESC_0F38__VEX()
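
The blsi, blsmsk and blsr groups above each read one source operand (register or memory), apply a single bit trick, and write the result through putIRegV. A minimal sketch of the three identities in plain C follows; the sample value is made up and the decoder's flag handling and IR plumbing are omitted:

   /* Hedged sketch of what BLSI, BLSMSK and BLSR compute. */
   #include <stdio.h>
   #include <stdint.h>

   int main(void) {
      uint32_t src    = 0x00B0u;            /* ...1011_0000                    */
      uint32_t blsi   = src & (0u - src);   /* isolate lowest set bit  -> 0x10 */
      uint32_t blsmsk = src ^ (src - 1);    /* mask up to lowest bit   -> 0x1F */
      uint32_t blsr   = src & (src - 1);    /* clear lowest set bit    -> 0xA0 */
      printf("blsi=%#x blsmsk=%#x blsr=%#x\n", blsi, blsmsk, blsr);
      return 0;
   }
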
29946 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29947 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29956 assign( src2, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29958 assign( src1, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29959 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29960 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29963 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29965 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx), dis_buf, in dis_ESC_0F38__VEX()
29966 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30001 putIRegG( size, pfx, rm, mkexpr(dst) ); in dis_ESC_0F38__VEX()
30012 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30013 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
30019 assign( src, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
30021 assign( mask, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
30022 DIP("pdep %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
30023 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30026 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
30028 DIP("pdep %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
30029 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30035 putIRegG( size, pfx, rm, in dis_ESC_0F38__VEX()
30045 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30046 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
30052 assign( src, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
30054 assign( mask, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
30055 DIP("pext %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
30056 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30059 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
30061 DIP("pext %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
30062 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30072 putIRegG( size, pfx, rm, in dis_ESC_0F38__VEX()
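
The pdep and pext groups above take the source from the VEX.vvvv register and the mask from the ModRM E operand, then write the scattered or gathered bits through putIRegG. A hedged sketch of the two operations as plain C loops; pdep32 and pext32 are illustrative helpers (not functions from this file) and the sample values are made up:

   /* Hedged sketch of PDEP (scatter) and PEXT (gather) semantics. */
   #include <stdio.h>
   #include <stdint.h>

   static uint32_t pdep32(uint32_t src, uint32_t mask) {
      uint32_t res = 0, k = 0;
      for (int i = 0; i < 32; i++) {
         if (mask & (1u << i)) {                 /* next selected slot      */
            if (src & (1u << k)) res |= 1u << i; /* deposit next src bit    */
            k++;
         }
      }
      return res;
   }

   static uint32_t pext32(uint32_t src, uint32_t mask) {
      uint32_t res = 0, k = 0;
      for (int i = 0; i < 32; i++) {
         if (mask & (1u << i)) {                 /* selected source bit     */
            if (src & (1u << i)) res |= 1u << k; /* pack it into low bits   */
            k++;
         }
      }
      return res;
   }

   int main(void) {
      printf("pdep=%#x pext=%#x\n",
             pdep32(0x5u,  0xF0u),   /* -> 0x50 */
             pext32(0x50u, 0xF0u));  /* -> 0x5  */
      return 0;
   }
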
30085 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30086 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
30095 assign( src2, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
30096 DIP("mulx %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
30097 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30100 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
30102 DIP("mulx %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
30103 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30109 putIRegV( size, pfx, in dis_ESC_0F38__VEX()
30111 putIRegG( size, pfx, rm, in dis_ESC_0F38__VEX()
30123 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30124 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "sarx", Iop_Sar8 ); in dis_ESC_0F38__VEX()
30129 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30130 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shlx", Iop_Shl8 ); in dis_ESC_0F38__VEX()
30135 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30136 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shrx", Iop_Shr8 ); in dis_ESC_0F38__VEX()
30141 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
30142 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
30152 assign( src2, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
30154 assign( src1, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
30155 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
30156 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30159 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
30161 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx), dis_buf, in dis_ESC_0F38__VEX()
30162 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
30209 putIRegG( size, pfx, rm, mkexpr(dst) ); in dis_ESC_0F38__VEX()
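
Every handler listed above for dis_ESC_0F38__VEX follows the same gate: the mandatory-prefix helpers (have66noF2noF3 and friends) plus getVexL and getRexW must match the encoding before any operand is decoded, otherwise control falls through to the decode-failure path. A minimal sketch of that gating shape, using plain booleans in place of the real prefix word; gate_256_W0 is an illustrative stand-in, not a function from this file:

   /* Hedged sketch of the prefix/L/W gate used by the 0F 38 VEX handlers. */
   #include <stdio.h>
   #include <stdbool.h>

   static bool gate_256_W0(bool has66, bool hasF2, bool hasF3,
                           int vexL, int rexW) {
      return has66 && !hasF2 && !hasF3 && vexL == 1 && rexW == 0;
   }

   int main(void) {
      /* e.g. a VEX.256.66.0F38.W0 encoding */
      if (gate_256_W0(true, false, false, 1, 0))
         printf("decode the 256-bit, W0 form\n");
      else
         printf("not this form; fall through to the next case or fail\n");
      return 0;
   }
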
30236 static Long decode_vregW(Int count, Long delta, UChar modrm, Prefix pfx, in decode_vregW() argument
30247 *dst = gregOfRexRM(pfx, modrm); in decode_vregW()
30251 UInt ereg = eregOfRexRM(pfx, modrm); in decode_vregW()
30256 addr = disAMode(&alen, vbi, pfx, delta, dis_buf, extra_byte); in decode_vregW()
30261 UInt vvvv = getVexNvvvv(pfx); in decode_vregW()
30283 static Long dis_FMA4 (Prefix pfx, Long delta, UChar opc, in dis_FMA4() argument
30311 delta = decode_vregW(4, delta, modrm, pfx, vbi, operand, &dst, getRexW(pfx)); in dis_FMA4()
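
decode_vregW and dis_FMA4 above handle the FMA4-style encodings, where a fourth register operand appears to be carried in the top nibble of a trailing immediate byte (hence the extra byte reserved when disAMode is called). A hedged sketch of that nibble extraction; the byte value is made up:

   /* Extract a fourth register index from an "is4"-style trailing byte. */
   #include <stdio.h>

   int main(void) {
      unsigned char is4 = 0xC5;              /* hypothetical trailing byte */
      unsigned int  reg = (is4 >> 4) & 0xF;  /* bits 7:4 select the reg    */
      printf("fourth operand: xmm%u\n", reg);
      return 0;
   }
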
30396 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F3A__VEX() argument
30413 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F3A__VEX()
30414 && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F3A__VEX()
30417 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30421 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30428 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30452 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30453 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30456 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30457 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30464 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30471 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30492 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30493 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30496 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30497 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30504 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30511 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30536 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30539 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30542 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30549 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30566 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30569 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30572 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30579 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30594 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30597 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30600 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30607 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30627 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30630 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30633 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30640 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30664 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30665 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30668 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30669 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30677 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30685 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30711 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30713 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30725 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30732 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30759 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30761 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30777 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30784 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30816 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30818 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30828 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30835 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30860 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30862 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30874 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30881 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30912 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30914 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30915 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30922 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30932 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30967 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30970 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30971 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30976 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30983 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30998 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31001 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31002 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31007 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31014 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31032 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31035 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31036 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31041 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31048 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31063 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31066 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31067 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31072 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31079 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31097 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31100 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31101 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31106 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31113 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31128 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31131 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31132 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31139 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31146 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31167 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31169 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31170 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31178 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31185 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31200 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31202 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31203 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31213 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31220 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31241 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31242 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31243 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__VEX()
31251 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31252 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31253 delta = dis_PEXTRW( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
31261 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31262 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31263 delta = dis_PEXTRD( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
31267 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31268 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F3A__VEX()
31269 delta = dis_PEXTRQ( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
31276 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31277 delta = dis_EXTRACTPS( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
31286 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31287 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31290 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31291 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31294 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31301 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31321 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31322 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31325 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31328 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31336 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31352 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31353 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31355 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31356 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31361 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
31368 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31388 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31390 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31391 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31397 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31408 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31427 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31428 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31430 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31431 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31436 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
31443 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31459 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31460 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F3A__VEX()
31462 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31463 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31468 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
31475 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31496 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31497 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31500 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31501 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31504 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31511 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31531 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31532 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31535 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31538 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31546 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31562 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31564 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31565 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31569 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
31576 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31592 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31594 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31595 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31599 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
31606 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31630 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31632 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31633 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31637 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
31644 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31664 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31669 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31670 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31675 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31683 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__VEX()
31699 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31704 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31705 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31712 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31720 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__VEX()
31746 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31751 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31752 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31757 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31764 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__VEX()
31781 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31782 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31785 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31786 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31794 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31802 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31829 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31830 delta = dis_VBLENDV_128 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31838 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31839 delta = dis_VBLENDV_256 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31850 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31851 delta = dis_VBLENDV_128 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31859 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31860 delta = dis_VBLENDV_256 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31871 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31872 delta = dis_VBLENDV_128 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31880 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31881 delta = dis_VBLENDV_256 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31899 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31901 delta = dis_PCMPxSTRx( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F3A__VEX()
31911 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31913 delta = dis_FMA4( pfx, delta, opc, uses_vvvv, vbi ); in dis_ESC_0F3A__VEX()
31924 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31925 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
31933 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F3A__VEX()
31934 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F3A__VEX()
31942 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F3A__VEX()
31943 DIP("rorx %d,%s,%s\n", imm8, nameIRegE(size,pfx,rm), in dis_ESC_0F3A__VEX()
31944 nameIRegG(size,pfx,rm)); in dis_ESC_0F3A__VEX()
31947 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F3A__VEX()
31950 DIP("rorx %d,%s,%s\n", imm8, dis_buf, nameIRegG(size,pfx,rm)); in dis_ESC_0F3A__VEX()
31956 putIRegG( size, pfx, rm, in dis_ESC_0F3A__VEX()
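
Two notes on the dis_ESC_0F3A__VEX lines above: every 0F 3A form carries an imm8, which appears to be why each disAMode call passes 1 as its final argument (reserving one byte after the memory operand), and the rorx group at the end rotates the E operand right by that immediate without touching flags. A hedged sketch of the rotate itself, with a made-up operand:

   /* Hedged sketch of a RORX-style rotate right by immediate (no flags). */
   #include <stdio.h>
   #include <stdint.h>

   static uint32_t ror32(uint32_t x, unsigned n) {
      n &= 31;
      return n ? (x >> n) | (x << (32 - n)) : x;
   }

   int main(void) {
      printf("%08x\n", ror32(0x80000001u, 1));   /* -> c0000000 */
      return 0;
   }
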
32020 Prefix pfx = PFX_EMPTY; in disInstr_AMD64_WRK() local
32126 case 0x66: pfx |= PFX_66; break; in disInstr_AMD64_WRK()
32127 case 0x67: pfx |= PFX_ASO; break; in disInstr_AMD64_WRK()
32128 case 0xF2: pfx |= PFX_F2; break; in disInstr_AMD64_WRK()
32129 case 0xF3: pfx |= PFX_F3; break; in disInstr_AMD64_WRK()
32130 case 0xF0: pfx |= PFX_LOCK; *expect_CAS = True; break; in disInstr_AMD64_WRK()
32131 case 0x2E: pfx |= PFX_CS; break; in disInstr_AMD64_WRK()
32132 case 0x3E: pfx |= PFX_DS; break; in disInstr_AMD64_WRK()
32133 case 0x26: pfx |= PFX_ES; break; in disInstr_AMD64_WRK()
32134 case 0x64: pfx |= PFX_FS; break; in disInstr_AMD64_WRK()
32135 case 0x65: pfx |= PFX_GS; break; in disInstr_AMD64_WRK()
32136 case 0x36: pfx |= PFX_SS; break; in disInstr_AMD64_WRK()
32138 pfx |= PFX_REX; in disInstr_AMD64_WRK()
32139 if (pre & (1<<3)) pfx |= PFX_REXW; in disInstr_AMD64_WRK()
32140 if (pre & (1<<2)) pfx |= PFX_REXR; in disInstr_AMD64_WRK()
32141 if (pre & (1<<1)) pfx |= PFX_REXX; in disInstr_AMD64_WRK()
32142 if (pre & (1<<0)) pfx |= PFX_REXB; in disInstr_AMD64_WRK()
32162 pfx |= PFX_VEX; in disInstr_AMD64_WRK()
32164 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR; in disInstr_AMD64_WRK()
32165 /* X */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_REXX; in disInstr_AMD64_WRK()
32166 /* B */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_REXB; in disInstr_AMD64_WRK()
32176 /* W */ pfx |= (vex2 & (1<<7)) ? PFX_REXW : 0; in disInstr_AMD64_WRK()
32177 /* ~v3 */ pfx |= (vex2 & (1<<6)) ? 0 : PFX_VEXnV3; in disInstr_AMD64_WRK()
32178 /* ~v2 */ pfx |= (vex2 & (1<<5)) ? 0 : PFX_VEXnV2; in disInstr_AMD64_WRK()
32179 /* ~v1 */ pfx |= (vex2 & (1<<4)) ? 0 : PFX_VEXnV1; in disInstr_AMD64_WRK()
32180 /* ~v0 */ pfx |= (vex2 & (1<<3)) ? 0 : PFX_VEXnV0; in disInstr_AMD64_WRK()
32181 /* L */ pfx |= (vex2 & (1<<2)) ? PFX_VEXL : 0; in disInstr_AMD64_WRK()
32185 case 1: pfx |= PFX_66; break; in disInstr_AMD64_WRK()
32186 case 2: pfx |= PFX_F3; break; in disInstr_AMD64_WRK()
32187 case 3: pfx |= PFX_F2; break; in disInstr_AMD64_WRK()
32195 pfx |= PFX_VEX; in disInstr_AMD64_WRK()
32197 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR; in disInstr_AMD64_WRK()
32198 /* ~v3 */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_VEXnV3; in disInstr_AMD64_WRK()
32199 /* ~v2 */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_VEXnV2; in disInstr_AMD64_WRK()
32200 /* ~v1 */ pfx |= (vex1 & (1<<4)) ? 0 : PFX_VEXnV1; in disInstr_AMD64_WRK()
32201 /* ~v0 */ pfx |= (vex1 & (1<<3)) ? 0 : PFX_VEXnV0; in disInstr_AMD64_WRK()
32202 /* L */ pfx |= (vex1 & (1<<2)) ? PFX_VEXL : 0; in disInstr_AMD64_WRK()
32206 case 1: pfx |= PFX_66; break; in disInstr_AMD64_WRK()
32207 case 2: pfx |= PFX_F3; break; in disInstr_AMD64_WRK()
32208 case 3: pfx |= PFX_F2; break; in disInstr_AMD64_WRK()
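
In both the 3-byte and 2-byte VEX cases above, the R/X/B and vvvv fields are stored inverted in the prefix bytes, so the decoder ORs in the corresponding flag when the bit is clear. A minimal sketch of that inverted-bit convention; the MY_* flag values and the sample byte are illustrative assumptions, not the file's PFX_* constants:

   /* Hedged sketch: a clear bit in the VEX byte means the extension is set. */
   #include <stdio.h>

   #define MY_REXR (1u<<0)
   #define MY_REXX (1u<<1)
   #define MY_REXB (1u<<2)

   int main(void) {
      unsigned char vex1 = 0x61;              /* made-up byte: ~R=0, ~X=1, ~B=1 */
      unsigned int  pfx  = 0;
      pfx |= (vex1 & (1<<7)) ? 0 : MY_REXR;   /* bit clear -> REX.R set         */
      pfx |= (vex1 & (1<<6)) ? 0 : MY_REXX;
      pfx |= (vex1 & (1<<5)) ? 0 : MY_REXB;
      printf("R=%d X=%d B=%d\n",
             !!(pfx & MY_REXR), !!(pfx & MY_REXX), !!(pfx & MY_REXB));
      return 0;
   }
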
32215 if ((pfx & PFX_VEX) && (pfx & PFX_REX)) in disInstr_AMD64_WRK()
32221 if (pfx & PFX_F2) n++; in disInstr_AMD64_WRK()
32222 if (pfx & PFX_F3) n++; in disInstr_AMD64_WRK()
32227 if (pfx & PFX_CS) n++; in disInstr_AMD64_WRK()
32228 if (pfx & PFX_DS) n++; in disInstr_AMD64_WRK()
32229 if (pfx & PFX_ES) n++; in disInstr_AMD64_WRK()
32230 if (pfx & PFX_FS) n++; in disInstr_AMD64_WRK()
32231 if (pfx & PFX_GS) n++; in disInstr_AMD64_WRK()
32232 if (pfx & PFX_SS) n++; in disInstr_AMD64_WRK()
32238 if ((pfx & PFX_FS) && !vbi->guest_amd64_assume_fs_is_const) in disInstr_AMD64_WRK()
32242 if ((pfx & PFX_GS) && !vbi->guest_amd64_assume_gs_is_const) in disInstr_AMD64_WRK()
32247 if (pfx & PFX_66) sz = 2; in disInstr_AMD64_WRK()
32248 if ((pfx & PFX_REX) && (pfx & PFX_REXW)) sz = 8; in disInstr_AMD64_WRK()
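
The two lines above resolve the effective operand size: it defaults to 4 bytes, a 0x66 prefix narrows it to 2, and REX.W, checked last, widens it to 8 and so overrides 0x66. A small sketch of that resolution order; opsize is an illustrative helper, not a function from this file:

   /* Hedged sketch of operand-size selection on amd64. */
   #include <stdio.h>
   #include <stdbool.h>

   static int opsize(bool have66, bool haveRexW) {
      int sz = 4;                /* default operand size       */
      if (have66)   sz = 2;      /* 0x66 narrows to 16 bits    */
      if (haveRexW) sz = 8;      /* REX.W wins, applied last   */
      return sz;
   }

   int main(void) {
      printf("%d %d %d %d\n",
             opsize(false,false), opsize(true,false),
             opsize(false,true),  opsize(true,true));   /* 4 2 8 8 */
      return 0;
   }
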
32253 if (haveLOCK(pfx)) { in disInstr_AMD64_WRK()
32264 if (!(pfx & PFX_VEX)) { in disInstr_AMD64_WRK()
32282 if (!(pfx & PFX_VEX)) { in disInstr_AMD64_WRK()
32291 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32296 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32301 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32306 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32322 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32328 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32334 archinfo, vbi, pfx, sz, delta ); in disInstr_AMD64_WRK()
32346 if (getVexNvvvv(pfx) != 0) in disInstr_AMD64_WRK()
32379 haveREX(pfx) ? 1 : 0, getRexW(pfx), getRexR(pfx), in disInstr_AMD64_WRK()
32380 getRexX(pfx), getRexB(pfx)); in disInstr_AMD64_WRK()
32382 haveVEX(pfx) ? 1 : 0, getVexL(pfx), in disInstr_AMD64_WRK()
32383 getVexNvvvv(pfx), in disInstr_AMD64_WRK()
32389 have66(pfx) ? 1 : 0, haveF2(pfx) ? 1 : 0, in disInstr_AMD64_WRK()
32390 haveF3(pfx) ? 1 : 0); in disInstr_AMD64_WRK()