/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/MCTargetDesc/
X86ATTInstPrinter.cpp
  166  if (Desc.TSFlags & X86II::VEX_W)  in printVecCompareInstr()
  174  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 8 : 16;  in printVecCompareInstr()
  176  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 4 : 8;  in printVecCompareInstr()
  178  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 2 : 4;  in printVecCompareInstr()
  308  if (Desc.TSFlags & X86II::VEX_W)  in printVecCompareInstr()
  316  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 8 : 16;  in printVecCompareInstr()
  318  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 4 : 8;  in printVecCompareInstr()
  320  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 2 : 4;  in printVecCompareInstr()

X86IntelInstPrinter.cpp
  156  if (Desc.TSFlags & X86II::VEX_W)  in printVecCompareInstr()
  164  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 8 : 16;  in printVecCompareInstr()
  166  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 4 : 8;  in printVecCompareInstr()
  168  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 2 : 4;  in printVecCompareInstr()
  296  if (Desc.TSFlags & X86II::VEX_W)  in printVecCompareInstr()
  304  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 8 : 16;  in printVecCompareInstr()
  306  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 4 : 8;  in printVecCompareInstr()
  308  NumElts = (Desc.TSFlags & X86II::VEX_W) ? 2 : 4;  in printVecCompareInstr()

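For context, both printer hits key the printed mask width off VEX.W: W = 1 selects 64-bit elements, so a vector of a given width holds half as many lanes as with 32-bit elements. A minimal standalone sketch of that relationship (the helper name and the plain bool parameter are illustrative, not from the source):

  #include <cstdio>

  // How many mask elements a vector of VecBits bits holds, given the VEX.W
  // bit: W = 1 means 64-bit elements, W = 0 means 32-bit elements.
  static unsigned numCmpElts(unsigned VecBits, bool VexW) {
    return VecBits / (VexW ? 64u : 32u);
  }

  int main() {
    // Mirrors the 8:16, 4:8, 2:4 ladders in the two printers above.
    std::printf("%u %u %u\n", numCmpElts(512, true), numCmpElts(256, true),
                numCmpElts(128, true));   // 8 4 2
    std::printf("%u %u %u\n", numCmpElts(512, false), numCmpElts(256, false),
                numCmpElts(128, false));  // 16 8 4
  }
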
X86MCCodeEmitter.cpp
  775   uint8_t VEX_W = (TSFlags & X86II::VEX_W) ? 1 : 0;  in emitVEXOpcodePrefix() (local)
  1129  VEX_B && VEX_X && !VEX_W && (VEX_5M == 1)) {  in emitVEXOpcodePrefix()
  1138  emitByte(LastByte | (VEX_W << 7), CurByte, OS);  in emitVEXOpcodePrefix()
  1153  emitByte((VEX_W << 7) | (VEX_4V << 3) | (EVEX_U << 2) | VEX_PP, CurByte,  in emitVEXOpcodePrefix()

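The emitter hits show where the bit lands in the encoding: bit 7 of the final VEX/EVEX prefix byte, as in the `LastByte | (VEX_W << 7)` expression at line 1138. A standalone sketch of that byte layout for the three-byte VEX form, assuming the individual fields are already computed (the function name is illustrative, not from the emitter):

  #include <cstdint>
  #include <cstdio>

  // Last byte of a three-byte VEX prefix: W in bit 7, the complemented vvvv
  // register specifier in bits 6..3, the vector-length bit L in bit 2, and
  // the implied-prefix field pp in bits 1..0.
  static uint8_t vexLastByte(bool W, uint8_t NotVvvv, bool L, uint8_t PP) {
    return uint8_t((W << 7) | ((NotVvvv & 0xf) << 3) | (L << 2) | (PP & 0x3));
  }

  int main() {
    // W = 1, no second source (vvvv = 1111b), 128-bit vector, pp = 01 (0x66).
    std::printf("0x%02x\n", vexLastByte(true, 0xf, false, 0x1));  // prints 0xf9
  }
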
X86BaseInfo.h
  859  VEX_W = 1ULL << VEX_WShift,  enumerator

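This is the definition site: VEX_W is a single bit of the per-instruction TSFlags bit-field, built with the usual shifted-enumerator pattern, and every other hit in this listing either masks TSFlags with it or attaches it to an instruction definition. A compact sketch of that pattern (the shift values are placeholders, not the real positions from X86BaseInfo.h):

  #include <cstdint>

  // Placeholder flag layout illustrating the "1ULL << Shift" enumerator style;
  // the real shift constants live in X86BaseInfo.h.
  enum : uint64_t {
    VEX_WShift  = 11,                   // placeholder position
    VEX_W       = 1ULL << VEX_WShift,
    VEX_4VShift = 12,                   // placeholder position
    VEX_4V      = 1ULL << VEX_4VShift,
  };

  // Consumers test the bit exactly the way the printers and emitter above do.
  static bool hasVexW(uint64_t TSFlags) { return (TSFlags & VEX_W) != 0; }

  int main() { return hasVexW(VEX_W | VEX_4V) ? 0 : 1; }
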
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/
X86InstrFMA.td
  142  v4f64, SchedWriteFMA>, VEX_W;
  145  v4f64, SchedWriteFMA>, VEX_W;
  148  v2f64, v4f64, SchedWriteFMA>, VEX_W;
  151  v2f64, v4f64, SchedWriteFMA>, VEX_W;
  163  loadv4f64, X86Fnmadd, v2f64, v4f64, SchedWriteFMA>, VEX_W;
  165  loadv4f64, X86Fnmsub, v2f64, v4f64, SchedWriteFMA>, VEX_W;
  319  VR128, sdmem, sched>, VEX_W;
  399  (OpVT (OpNode RC:$src1, RC:$src2, RC:$src3)))]>, VEX_W, VEX_LIG,
  406  (mem_frag addr:$src3)))]>, VEX_W, VEX_LIG,
  437  []>, VEX_W, VEX_LIG, Sched<[sched]>;
  [all …]

X86InstrAVX512.td
  557   vinsert256_insert, sched>, VEX_W, EVEX_V512;
  573   VEX_W, EVEX_V512;
  846   VEX_W, EVEX_V512, EVEX_CD8<64, CD8VT4>;
  869   VEX_W, EVEX_V512, EVEX_CD8<64, CD8VT2>;
  1338  X86VBroadcast, GR64, HasAVX512>, VEX_W;
  1472  v8i64_info, v4i64x_info>, VEX_W,
  1475  v8f64_info, v4f64x_info>, VEX_W,
  1641  v8i64_info, v2i64x_info>, VEX_W,
  1647  v8f64_info, v2f64x_info>, VEX_W,
  1743  avx512vl_i64_info, VK8>, VEX_W;
  [all …]

X86InstrXOP.td
  108  XOP_4V, VEX_W, Sched<[sched.Folded, sched.ReadAfterFold]>;
  122  XOP_4V, VEX_W, Sched<[sched]>, FoldGenData<NAME#rr>;
  299  XOP_4V, VEX_W, Sched<[sched.Folded, sched.ReadAfterFold, sched.ReadAfterFold]>;
  319  []>, XOP_4V, VEX_W, Sched<[sched]>, FoldGenData<NAME#rrr>;
  345  XOP_4V, VEX_W, Sched<[sched.Folded, sched.ReadAfterFold, sched.ReadAfterFold]>;
  364  []>, XOP_4V, VEX_W, Sched<[sched]>, FoldGenData<NAME#rrr>;
  433  (i8 timm:$src4))))]>, VEX_W,
  453  []>, VEX_W, Sched<[sched]>, FoldGenData<NAME#rr>;

X86InstrFormats.td
  213  class VEX_W { bit HasVEX_W = 1; }
  215  // Special version of VEX_W that can be changed to VEX.W==0 for EVEX2VEX.
  301  bit HasVEX_W = 0; // Does this inst set the VEX_W field?
  302  bit IgnoresVEX_W = 0; // Does this inst ignore VEX_W field?
  962  : VS2I<o, F, outs, ins, asm, pattern>, VEX_W;

X86InstrInfo.td
  2433  defm BLSR64 : bmi_bls<"blsr{q}", MRM1r, MRM1m, GR64, i64mem, WriteBLS>, VEX_W;
  2435  defm BLSMSK64 : bmi_bls<"blsmsk{q}", MRM2r, MRM2m, GR64, i64mem, WriteBLS>, VEX_W;
  2437  defm BLSI64 : bmi_bls<"blsi{q}", MRM3r, MRM3m, GR64, i64mem, WriteBLS>, VEX_W;
  2516  X86bextr, loadi64, WriteBEXTR>, VEX_W;
  2542  X86bzhi, loadi64, WriteBZHI>, VEX_W;
  2600  int_x86_bmi_pdep_64, loadi64>, T8XD, VEX_W;
  2604  int_x86_bmi_pext_64, loadi64>, T8XS, VEX_W;
  2635  i64immSExt32, WriteBEXTR>, VEX_W;
  2657  i64mem, Sched>, VEX_W;
  2691  [(int_x86_llwpcb GR64:$src)]>, XOP, XOP9, VEX_W;
  [all …]

X86InstrShiftRotate.td
  906  defm RORX64 : bmi_rotate<"rorx{q}", GR64, i64mem>, VEX_W;
  908  defm SARX64 : bmi_shift<"sarx{q}", GR64, i64mem>, T8XS, VEX_W;
  910  defm SHRX64 : bmi_shift<"shrx{q}", GR64, i64mem>, T8XD, VEX_W;
  912  defm SHLX64 : bmi_shift<"shlx{q}", GR64, i64mem>, T8PD, VEX_W;

X86InstrSSE.td
  878   XS, VEX, VEX_W, VEX_LIG;
  886   XD, VEX, VEX_W, VEX_LIG;
  899   VEX_W, VEX_LIG, SIMD_EXC;
  905   VEX_W, VEX_LIG, SIMD_EXC;
  1001  WriteCvtSD2I, SSEPackedDouble>, XD, VEX, VEX_W, VEX_LIG;
  1017  XS, VEX_4V, VEX_LIG, VEX_W, SIMD_EXC;
  1023  XD, VEX_4V, VEX_LIG, VEX_W, SIMD_EXC;
  1078  XS, VEX, VEX_LIG, VEX_W;
  1085  XD, VEX, VEX_LIG, VEX_W;
  1144  WriteCvtSS2I, SSEPackedSingle>, XS, VEX, VEX_W, VEX_LIG;
  [all …]

X86InstrArithmetic.td
  1289  defm ANDN64 : bmi_andn<"andn{q}", GR64, i64mem, loadi64, WriteALU>, T8PS, VEX_4V, VEX_W;
  1325  defm MULX64 : bmi_mulx<"mulx{q}", GR64, i64mem, WriteIMul64>, VEX_W;

X86MCInstLower.cpp
  919  !(TSFlags & X86II::VEX_W) && (TSFlags & X86II::VEX_4V) &&  in Lower()

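The single lowering hit also requires VEX.W to be clear. W = 0 is one of the preconditions for the compact two-byte (C5) VEX prefix, the same constraint the `!VEX_W && (VEX_5M == 1)` test at X86MCCodeEmitter.cpp line 1129 checks alongside the inverted X and B bits. A standalone sketch of that two-byte eligibility rule (the function and parameter names are illustrative):

  // The two-byte VEX prefix implies W = 0, m-mmmm = 1 (the 0F opcode map), and
  // inverted X and B bits equal to 1, so it is usable only when all of those hold.
  static bool canUseTwoByteVex(bool VexW, unsigned Vex5M, bool NotX, bool NotB) {
    return !VexW && Vex5M == 1 && NotX && NotB;
  }

  int main() { return canUseTwoByteVex(false, 1, true, true) ? 0 : 1; }
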