/external/llvm/test/CodeGen/X86/ |
D | vec_compare.ll |
      48   ; CHECK: pshufd $177
      59   ; CHECK: pshufd $177
      80   ; CHECK: pshufd $160
      82   ; CHECK: pshufd $245
      84   ; CHECK: pshufd $245
      97   ; CHECK: pshufd $160
      99   ; CHECK: pshufd $245
      101  ; CHECK: pshufd $245
      114  ; CHECK: pshufd $160
      116  ; CHECK: pshufd $245
      [all …]
|
D | vector-rem.ll |
      7    ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm0[3,1,2,3]
      9    ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm1[3,1,2,3]
      14   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,2,3]
      16   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,1,2,3]
      27   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
      29   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
      46   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm0[3,1,2,3]
      48   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm1[3,1,2,3]
      53   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,2,3]
      55   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,1,2,3]
      [all …]
|
D | lower-bitcast.ll |
      13   ; pshufd+paddq+pshufd. This is fixed with the widening legalization.
      17   ; CHECK: pshufd
      19   ; CHECK-NEXT: pshufd
      53   ; CHECK-NOT: pshufd
      55   ; CHECK-NOT: pshufd
      59   ; CHECK-WIDE-NOT: pshufd
      61   ; CHECK-WIDE-NOT: pshufd
      71   ; FIXME: At the moment we still produce the sequence pshufd+paddd+pshufd.
      76   ; CHECK: pshufd
      78   ; CHECK-NEXT: pshufd
      [all …]
|
D | vec_compare-sse4.ll |
      14   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      16   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      18   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
      29   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      31   ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      33   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
      50   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
|
/external/llvm-project/llvm/test/CodeGen/X86/ |
D | vector-trunc-packus.ll |
      27   ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
      29   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
      31   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
      40   ; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
      42   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
      44   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
      47   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
      58   ; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
      60   ; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
      62   ; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
      [all …]
|
D | vector-trunc-ssat.ll |
      27   ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
      29   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
      31   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
      40   ; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
      42   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
      44   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
      49   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
      60   ; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
      62   ; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
      64   ; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
      [all …]
|
D | vec_compare.ll |
      56   ; CHECK-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
      68   ; CHECK-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
      86   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      88   ; CHECK-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      90   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
      106  ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      108  ; CHECK-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      110  ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
      126  ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      128  ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
      [all …]
|
D | vector-fshl-rot-sub128.ll |
      31   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      33   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,3,2,3]
      34   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      36   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,3,2,3]
      38   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      39   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      46   ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      51   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,1,3,3]
      54   ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      56   ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,0,2,2]
      [all …]
|
D | sdiv-exact.ll |
      41   ; X86-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      43   ; X86-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      45   ; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
      63   ; X86-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      65   ; X86-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      67   ; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
      89   ; X86-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      91   ; X86-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
      93   ; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
      116  ; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      [all …]
|
D | split-vector-rem.ll |
      8    ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
      10   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
      15   ; CHECK-NEXT: pshufd {{.*#+}} xmm5 = xmm4[2,3,2,3]
      17   ; CHECK-NEXT: pshufd {{.*#+}} xmm5 = xmm2[2,3,2,3]
      28   ; CHECK-NEXT: pshufd {{.*#+}} xmm4 = xmm4[1,1,1,1]
      30   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,1,1]
      37   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm1[3,3,3,3]
      39   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm3[3,3,3,3]
      44   ; CHECK-NEXT: pshufd {{.*#+}} xmm4 = xmm1[2,3,2,3]
      46   ; CHECK-NEXT: pshufd {{.*#+}} xmm4 = xmm3[2,3,2,3]
      [all …]
|
D | vector-fshr-rot-sub128.ll |
      33   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      35   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,3,2,3]
      36   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      38   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,3,2,3]
      40   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      41   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      54   ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,1,3,3]
      55   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
      58   ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      60   ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,0,2,2]
      [all …]
|
D | combine-rotates.ll |
      12   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      14   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,3,2,3]
      15   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      17   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,3,2,3]
      19   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      20   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      129  ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[1,3,2,3]
      130  ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm0[1,1,3,3]
      131  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
      133  ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm3[1,3,2,3]
      [all …]
|
D | vec_shift6.ll |
      46   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      48   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      50   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      120  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
      122  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      123  ; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,3,3]
      125  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
      128  ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
      129  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      131  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      [all …]
|
D | vector-trunc-usat.ll |
      26   ; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
      28   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      30   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
      35   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
      45   ; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
      47   ; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      49   ; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
      54   ; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
      67   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm3[0,0,2,2]
      71   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
      [all …]
|
D | urem-seteq-vec-nonsplat.ll |
      15   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
      19   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      21   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,3,2,3]
      29   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
      32   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm4[1,1,3,3]
      34   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      48   ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
      51   ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
      128  ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      130  ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      [all …]
|
D | vector-rem.ll |
      7    ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
      9    ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm1[3,3,3,3]
      14   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm0[2,3,2,3]
      16   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm1[2,3,2,3]
      27   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
      29   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,1,1]
      45   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
      47   ; CHECK-NEXT: pshufd {{.*#+}} xmm2 = xmm1[3,3,3,3]
      52   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm0[2,3,2,3]
      54   ; CHECK-NEXT: pshufd {{.*#+}} xmm3 = xmm1[2,3,2,3]
      [all …]
|
D | srem-seteq-vec-nonsplat.ll |
      15   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,3,2,3]
      16   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
      17   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,3,3]
      19   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm4[1,3,2,3]
      42   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,3,3]
      44   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
      45   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
      47   ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
      57   ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,1,3,3]
      58   ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
      [all …]
|
D | combine-shl.ll |
      86   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      88   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      89   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      91   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      120  ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      122  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      123  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
      125  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
      182  ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
      184  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
      [all …]
|
D | vec_compare-sse4.ll |
      14   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      16   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      18   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
      29   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
      31   ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
      33   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
      50   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
|
/external/openscreen/third_party/boringssl/mac-x86/crypto/fipsmodule/ |
D | ghash-x86.S |
      21   pshufd $78,%xmm2,%xmm2
      22   pshufd $255,%xmm2,%xmm4
      34   pshufd $78,%xmm0,%xmm3
      35   pshufd $78,%xmm2,%xmm4
      68   pshufd $78,%xmm2,%xmm3
      69   pshufd $78,%xmm0,%xmm4
      94   pshufd $78,%xmm0,%xmm3
      158  pshufd $78,%xmm6,%xmm3
      172  pshufd $78,%xmm0,%xmm4
      211  pshufd $78,%xmm7,%xmm3
      [all …]
|
/external/openscreen/third_party/boringssl/linux-x86/crypto/fipsmodule/ |
D | ghash-x86.S |
      22   pshufd $78,%xmm2,%xmm2
      23   pshufd $255,%xmm2,%xmm4
      35   pshufd $78,%xmm0,%xmm3
      36   pshufd $78,%xmm2,%xmm4
      69   pshufd $78,%xmm2,%xmm3
      70   pshufd $78,%xmm0,%xmm4
      97   pshufd $78,%xmm0,%xmm3
      163  pshufd $78,%xmm6,%xmm3
      177  pshufd $78,%xmm0,%xmm4
      216  pshufd $78,%xmm7,%xmm3
      [all …]
|
/external/boringssl/linux-x86/crypto/fipsmodule/ |
D | ghash-x86.S |
      22   pshufd $78,%xmm2,%xmm2
      23   pshufd $255,%xmm2,%xmm4
      35   pshufd $78,%xmm0,%xmm3
      36   pshufd $78,%xmm2,%xmm4
      69   pshufd $78,%xmm2,%xmm3
      70   pshufd $78,%xmm0,%xmm4
      97   pshufd $78,%xmm0,%xmm3
      163  pshufd $78,%xmm6,%xmm3
      177  pshufd $78,%xmm0,%xmm4
      216  pshufd $78,%xmm7,%xmm3
      [all …]
|
/external/boringssl/mac-x86/crypto/fipsmodule/ |
D | ghash-x86.S |
      21   pshufd $78,%xmm2,%xmm2
      22   pshufd $255,%xmm2,%xmm4
      34   pshufd $78,%xmm0,%xmm3
      35   pshufd $78,%xmm2,%xmm4
      68   pshufd $78,%xmm2,%xmm3
      69   pshufd $78,%xmm0,%xmm4
      94   pshufd $78,%xmm0,%xmm3
      158  pshufd $78,%xmm6,%xmm3
      172  pshufd $78,%xmm0,%xmm4
      211  pshufd $78,%xmm7,%xmm3
      [all …]
|
/external/rust/crates/quiche/deps/boringssl/mac-x86/crypto/fipsmodule/ |
D | ghash-x86.S |
      21   pshufd $78,%xmm2,%xmm2
      22   pshufd $255,%xmm2,%xmm4
      34   pshufd $78,%xmm0,%xmm3
      35   pshufd $78,%xmm2,%xmm4
      68   pshufd $78,%xmm2,%xmm3
      69   pshufd $78,%xmm0,%xmm4
      94   pshufd $78,%xmm0,%xmm3
      158  pshufd $78,%xmm6,%xmm3
      172  pshufd $78,%xmm0,%xmm4
      211  pshufd $78,%xmm7,%xmm3
      [all …]
|
/external/rust/crates/quiche/deps/boringssl/linux-x86/crypto/fipsmodule/ |
D | ghash-x86.S |
      22   pshufd $78,%xmm2,%xmm2
      23   pshufd $255,%xmm2,%xmm4
      35   pshufd $78,%xmm0,%xmm3
      36   pshufd $78,%xmm2,%xmm4
      69   pshufd $78,%xmm2,%xmm3
      70   pshufd $78,%xmm0,%xmm4
      97   pshufd $78,%xmm0,%xmm3
      163  pshufd $78,%xmm6,%xmm3
      177  pshufd $78,%xmm0,%xmm4
      216  pshufd $78,%xmm7,%xmm3
      [all …]
|
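Note on the immediates that recur in the matches above (a minimal, standalone sketch, not taken from any file listed here): pshufd's 8-bit immediate selects four 32-bit source lanes, two bits per destination lane, lowest lane first. So $78 (0b01001110) yields [2,3,0,1], the 64-bit half swap used throughout the ghash-x86.S copies, and $177 (0b10110001) yields [1,0,3,2], the adjacent-dword swap matched in vec_compare.ll. The C snippet below illustrates this with the SSE2 intrinsic _mm_shuffle_epi32; the file name and variable names are illustrative only.

    /* Illustrative sketch: how pshufd's imm8 maps source dwords to destination
     * dwords.  Build with: cc -msse2 pshufd_demo.c (hypothetical file name). */
    #include <emmintrin.h>
    #include <stdio.h>

    int main(void) {
        /* Lanes low-to-high hold their own index: [0, 1, 2, 3]. */
        __m128i v = _mm_set_epi32(3, 2, 1, 0);

        /* pshufd $78:  imm8 fields (low to high) = 2,3,0,1 -> swaps the two
         * 64-bit halves, as in the ghash-x86.S matches. */
        __m128i swap_halves = _mm_shuffle_epi32(v, 78);

        /* pshufd $177: imm8 fields (low to high) = 1,0,3,2 -> swaps adjacent
         * dwords, as in the vec_compare.ll CHECK lines. */
        __m128i swap_pairs = _mm_shuffle_epi32(v, 177);

        int a[4], b[4];
        _mm_storeu_si128((__m128i *)a, swap_halves);
        _mm_storeu_si128((__m128i *)b, swap_pairs);
        printf("$78  -> [%d,%d,%d,%d]\n", a[0], a[1], a[2], a[3]);  /* [2,3,0,1] */
        printf("$177 -> [%d,%d,%d,%d]\n", b[0], b[1], b[2], b[3]);  /* [1,0,3,2] */
        return 0;
    }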