Searched refs:vpalignr (Results 1 – 25 of 119) sorted by relevance

/external/boringssl/linux-x86_64/crypto/cipher_extra/
chacha20_poly1305_x86_64.S
4146 vpalignr $12,%ymm12,%ymm12,%ymm12
4147 vpalignr $8,%ymm8,%ymm8,%ymm8
4148 vpalignr $4,%ymm4,%ymm4,%ymm4
4165 vpalignr $4,%ymm12,%ymm12,%ymm12
4166 vpalignr $8,%ymm8,%ymm8,%ymm8
4167 vpalignr $12,%ymm4,%ymm4,%ymm4
4386 vpalignr $4,%ymm7,%ymm7,%ymm7
4387 vpalignr $8,%ymm11,%ymm11,%ymm11
4388 vpalignr $12,%ymm15,%ymm15,%ymm15
4389 vpalignr $4,%ymm6,%ymm6,%ymm6
[all …]
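
In the AT&T-syntax .S files here, vpalignr $n, %srcB, %srcA, %dst shifts the byte concatenation srcA:srcB right by n bytes, independently in each 128-bit lane. In the ChaCha20 rounds above, using the same register in all three operands turns that shift into a rotate of the four dwords of each lane: $4, $8, and $12 rotate by one, two, and three positions, diagonalizing the state rows between the column and diagonal quarter-rounds and then rotating them back. A minimal stand-alone sketch of the idiom (hypothetical label, not from the source):

        .text
        .globl  rotate_lane_dwords_demo
rotate_lane_dwords_demo:
        # With identical sources the byte shift becomes a rotate:
        # dword i of each 128-bit lane <- dword (i+1) mod 4.
        vpalignr $4, %ymm0, %ymm0, %ymm0
        ret
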
/external/boringssl/mac-x86_64/crypto/cipher_extra/
chacha20_poly1305_x86_64.S
4145 vpalignr $12,%ymm12,%ymm12,%ymm12
4146 vpalignr $8,%ymm8,%ymm8,%ymm8
4147 vpalignr $4,%ymm4,%ymm4,%ymm4
4164 vpalignr $4,%ymm12,%ymm12,%ymm12
4165 vpalignr $8,%ymm8,%ymm8,%ymm8
4166 vpalignr $12,%ymm4,%ymm4,%ymm4
4385 vpalignr $4,%ymm7,%ymm7,%ymm7
4386 vpalignr $8,%ymm11,%ymm11,%ymm11
4387 vpalignr $12,%ymm15,%ymm15,%ymm15
4388 vpalignr $4,%ymm6,%ymm6,%ymm6
[all …]
/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/X86/
x86-interleaved-access.ll
1022 ; AVX1-NEXT: vpalignr {{.*#+}} xmm6 = xmm5[11,12,13,14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10]
1023 ; AVX1-NEXT: vpalignr {{.*#+}} xmm7 = xmm2[11,12,13,14,15],xmm0[0,1,2,3,4,5,6,7,8,9,10]
1024 ; AVX1-NEXT: vpalignr {{.*#+}} xmm3 = xmm3[11,12,13,14,15],xmm4[0,1,2,3,4,5,6,7,8,9,10]
1025 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[11,12,13,14,15],xmm1[0,1,2,3,4,5,6,7,8,9,10]
1027 ; AVX1-NEXT: vpalignr {{.*#+}} xmm4 = xmm4[11,12,13,14,15],xmm5[0,1,2,3,4,5,6,7,8,9,10]
1028 ; AVX1-NEXT: vpalignr {{.*#+}} xmm1 = xmm1[11,12,13,14,15],xmm2[0,1,2,3,4,5,6,7,8,9,10]
1030 ; AVX1-NEXT: vpalignr {{.*#+}} xmm9 = xmm7[11,12,13,14,15],xmm1[0,1,2,3,4,5,6,7,8,9,10]
1031 ; AVX1-NEXT: vpalignr {{.*#+}} xmm4 = xmm6[11,12,13,14,15],xmm4[0,1,2,3,4,5,6,7,8,9,10]
1064 ; AVX-NEXT: vpalignr {{.*#+}} ymm3 = ymm2[11,12,13,14,15],ymm0[0,1,2,3,4,5,6,7,8,9,10],ymm2[27,2…
1065 ; AVX-NEXT: vpalignr {{.*#+}} ymm0 = ymm0[11,12,13,14,15],ymm1[0,1,2,3,4,5,6,7,8,9,10],ymm0[27,2…
[all …]
palignr.ll
35 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[4,5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1,2,3]
55 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[4,5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1,2,3]
75 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[8,9,10,11,12,13,14,15],xmm0[0,1,2,3,4,5,6,7]
112 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[6,7,8,9,10,11,12,13,14,15],xmm1[0,1,2,3,4,5]
134 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],xmm1[0,1,2,3,4,5,6,7,8,9]
156 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1,2,3,4]
184 ; CHECK-AVX-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[2,3,4,5,6,7,8,9,10,11,12,13,14,15,0,1]
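
The {{.*#+}} patterns in these .ll tests match the asm printer's shuffle comment, which lists the source of every result byte. The comment at line 35 above, xmm0 = xmm0[4..15],xmm1[0,1,2,3], decodes to a 4-byte alignr; in AT&T syntax the operand written right after the immediate supplies the low bytes of the result. A sketch of the matching instruction (hypothetical label):

        .text
        .globl  align4_demo
align4_demo:
        # Printed comment: xmm0 = xmm0[4,...,15],xmm1[0,1,2,3]
        # (bytes 4..15 of %xmm0, then bytes 0..3 of %xmm1).
        vpalignr $4, %xmm0, %xmm1, %xmm0
        ret
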
trunc-subvector.ll
112 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[8,9,10,11,12,13,14,15],ymm0[0,1,2,3,4,5,6,7],ymm1[24,…
123 ; AVX512-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[8,9,10,11,12,13,14,15],xmm1[0,1,2,3,4,5,6,7]
231 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[8,9,10,11,12,13,14,15],ymm0[0,1,2,3,4,5,6,7],ymm1[24,…
242 ; AVX512-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[8,9,10,11,12,13,14,15],xmm1[0,1,2,3,4,5,6,7]
2012-04-26-sdglue.ll
12 ; CHECK-NEXT: vpalignr {{.*#+}} xmm1 = xmm3[4,5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1,2,3]
14 ; CHECK-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1,2,3]
pr34592.ll
29 ; CHECK-NEXT: vpalignr {{.*#+}} ymm6 = ymm2[8,9,10,11,12,13,14,15],ymm6[0,1,2,3,4,5,6,7],ymm2[24…
45 ; CHECK-NEXT: vpalignr {{.*#+}} ymm11 = ymm11[8,9,10,11,12,13,14,15],ymm5[0,1,2,3,4,5,6,7],ymm11…
vector-shuffle-256-v16.ll
2008 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
2009 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[14,15],xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
2015 ; AVX2OR512VL-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[14,15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13],y…
2026 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1]
2027 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1]
2033 ; AVX2OR512VL-NEXT: vpalignr {{.*#+}} ymm0 = ymm0[2,3,4,5,6,7,8,9,10,11,12,13,14,15],ymm1[0,1],y…
2044 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1]
2045 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm0[0,1]
2051 ; AVX2OR512VL-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[2,3,4,5,6,7,8,9,10,11,12,13,14,15],ymm0[0,1],y…
2062 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
[all …]
/external/llvm/test/MC/X86/
shuffle-comments.s
18 vpalignr $8, %xmm0, %xmm1, %xmm2
20 vpalignr $8, (%rax), %xmm1, %xmm2
23 vpalignr $16, %xmm0, %xmm1, %xmm2
25 vpalignr $16, (%rax), %xmm1, %xmm2
28 vpalignr $0, %xmm0, %xmm1, %xmm2
30 vpalignr $0, (%rax), %xmm1, %xmm2
33 vpalignr $8, %ymm0, %ymm1, %ymm2
35 vpalignr $8, (%rax), %ymm1, %ymm2
38 vpalignr $16, %ymm0, %ymm1, %ymm2
40 vpalignr $16, (%rax), %ymm1, %ymm2
[all …]
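
This test pins down the boundary immediates: $0 passes the first-written source through untouched, $16 shifts it out entirely and yields the other source, and only intermediate values blend bytes from both. A sketch of the three cases (hypothetical label):

        .text
        .globl  boundary_imm_demo
boundary_imm_demo:
        vpalignr $0,  %xmm0, %xmm1, %xmm2   # xmm2 = xmm0
        vpalignr $16, %xmm0, %xmm1, %xmm2   # xmm2 = xmm1
        vpalignr $8,  %xmm0, %xmm1, %xmm2   # xmm2 = xmm0[8..15],xmm1[0..7]
        ret
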
x86-64-avx512bw_vl.s
8565 vpalignr $171, %xmm21, %xmm26, %xmm19
8569 vpalignr $171, %xmm21, %xmm26, %xmm19 {%k4}
8573 vpalignr $171, %xmm21, %xmm26, %xmm19 {%k4} {z}
8577 vpalignr $123, %xmm21, %xmm26, %xmm19
8581 vpalignr $123, (%rcx), %xmm26, %xmm19
8585 vpalignr $123, 291(%rax,%r14,8), %xmm26, %xmm19
8589 vpalignr $123, 2032(%rdx), %xmm26, %xmm19
8593 vpalignr $123, 2048(%rdx), %xmm26, %xmm19
8597 vpalignr $123, -2048(%rdx), %xmm26, %xmm19
8601 vpalignr $123, -2064(%rdx), %xmm26, %xmm19
[all …]
x86-64-avx512bw.s
4348 vpalignr $171, %zmm17, %zmm26, %zmm22
4352 vpalignr $171, %zmm17, %zmm26, %zmm22 {%k3}
4356 vpalignr $171, %zmm17, %zmm26, %zmm22 {%k3} {z}
4360 vpalignr $123, %zmm17, %zmm26, %zmm22
4364 vpalignr $123, (%rcx), %zmm26, %zmm22
4368 vpalignr $123, 291(%rax,%r14,8), %zmm26, %zmm22
4372 vpalignr $123, 8128(%rdx), %zmm26, %zmm22
4376 vpalignr $123, 8192(%rdx), %zmm26, %zmm22
4380 vpalignr $123, -8192(%rdx), %zmm26, %zmm22
4384 vpalignr $123, -8256(%rdx), %zmm26, %zmm22
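
Two things are exercised here besides the opcode: the opmask forms ({%k3} merge-masks, preserving destination bytes whose mask bit is clear; {%k3} {z} zeroes them instead), and EVEX disp8*N compression. For a 64-byte zmm memory operand the 8-bit displacement is scaled by 64, so 8128 (= 127*64) and -8192 (= -128*64) still fit in disp8 while 8192 and -8256 force a 32-bit displacement; the xmm file above probes the same boundary with N = 16 (2032/2048 and -2048/-2064). A sketch of the masked forms (hypothetical registers):

        .text
        .globl  masked_align_demo
masked_align_demo:
        vpalignr $8, %zmm1, %zmm2, %zmm0 {%k1}      # merge: masked-off bytes keep %zmm0
        vpalignr $8, %zmm1, %zmm2, %zmm0 {%k1} {z}  # zero: masked-off bytes cleared
        ret
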
/external/swiftshader/third_party/llvm-7.0/llvm/test/MC/X86/
shuffle-comments.s
18 vpalignr $8, %xmm0, %xmm1, %xmm2
20 vpalignr $8, (%rax), %xmm1, %xmm2
23 vpalignr $16, %xmm0, %xmm1, %xmm2
25 vpalignr $16, (%rax), %xmm1, %xmm2
28 vpalignr $0, %xmm0, %xmm1, %xmm2
30 vpalignr $0, (%rax), %xmm1, %xmm2
33 vpalignr $8, %ymm0, %ymm1, %ymm2
35 vpalignr $8, (%rax), %ymm1, %ymm2
38 vpalignr $16, %ymm0, %ymm1, %ymm2
40 vpalignr $16, (%rax), %ymm1, %ymm2
[all …]
x86-64-avx512bw_vl.s
8565 vpalignr $171, %xmm21, %xmm26, %xmm19
8569 vpalignr $171, %xmm21, %xmm26, %xmm19 {%k4}
8573 vpalignr $171, %xmm21, %xmm26, %xmm19 {%k4} {z}
8577 vpalignr $123, %xmm21, %xmm26, %xmm19
8581 vpalignr $123, (%rcx), %xmm26, %xmm19
8585 vpalignr $123, 291(%rax,%r14,8), %xmm26, %xmm19
8589 vpalignr $123, 2032(%rdx), %xmm26, %xmm19
8593 vpalignr $123, 2048(%rdx), %xmm26, %xmm19
8597 vpalignr $123, -2048(%rdx), %xmm26, %xmm19
8601 vpalignr $123, -2064(%rdx), %xmm26, %xmm19
[all …]
x86-64-avx512bw.s
4348 vpalignr $171, %zmm17, %zmm26, %zmm22
4352 vpalignr $171, %zmm17, %zmm26, %zmm22 {%k3}
4356 vpalignr $171, %zmm17, %zmm26, %zmm22 {%k3} {z}
4360 vpalignr $123, %zmm17, %zmm26, %zmm22
4364 vpalignr $123, (%rcx), %zmm26, %zmm22
4368 vpalignr $123, 291(%rax,%r14,8), %zmm26, %zmm22
4372 vpalignr $123, 8128(%rdx), %zmm26, %zmm22
4376 vpalignr $123, 8192(%rdx), %zmm26, %zmm22
4380 vpalignr $123, -8192(%rdx), %zmm26, %zmm22
4384 vpalignr $123, -8256(%rdx), %zmm26, %zmm22
/external/llvm/test/CodeGen/X86/
vector-shuffle-512-v64.ll
72 ; AVX512F-NEXT: vpalignr {{.*#+}} ymm0 = ymm2[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm2[…
73 ; AVX512F-NEXT: vpalignr {{.*#+}} ymm1 = ymm3[15],ymm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm3[…
78 ; AVX512BW-NEXT: vpalignr {{.*#+}} zmm0 = zmm1[15],zmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],zmm1…
83 ; AVX512DQ-NEXT: vpalignr {{.*#+}} ymm0 = ymm2[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm2…
84 ; AVX512DQ-NEXT: vpalignr {{.*#+}} ymm1 = ymm3[15],ymm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm3…
vector-shuffle-256-v16.ll
1496 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1497 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[14,15],xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1503 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[14,15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13],ymm1[30,…
1514 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1]
1515 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm0[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1]
1521 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm0[2,3,4,5,6,7,8,9,10,11,12,13,14,15],ymm1[0,1],ymm0[18,…
1532 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1]
1533 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm0[0,1]
1539 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[2,3,4,5,6,7,8,9,10,11,12,13,14,15],ymm0[0,1],ymm1[18,…
1550 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
[all …]
vector-shuffle-256-v32.ll
1815 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1816 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[15],xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1822 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm1[31]…
1833 ; AVX1-NEXT: vpalignr {{.*#+}} xmm1 = xmm1[15],xmm2[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1840 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm1[31]…
1849 ; AVX1-NEXT: vpalignr {{.*#+}} xmm1 = xmm1[15],xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1857 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm1[31]…
1874 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm1[31]…
1885 ; AVX1-NEXT: vpalignr {{.*#+}} xmm0 = xmm1[15],xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1891 ; AVX2-NEXT: vpalignr {{.*#+}} ymm0 = ymm1[15],ymm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],ymm1[31]…
[all …]
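
These AVX1/AVX2 pairs show the same 32-byte rotation lowered two ways: AVX1 has no 256-bit integer shuffles, so it is split into two 128-bit vpalignrs, while AVX2 issues a single YMM vpalignr that still operates per 128-bit lane (hence the ymm1[31] continuation in the printed comment). A sketch of both lowerings (hypothetical labels; register choices follow the comments above):

        .text
        .globl  rot15_avx1_demo
rot15_avx1_demo:                # AVX1: two 128-bit halves
        vpalignr $15, %xmm2, %xmm3, %xmm2
        vpalignr $15, %xmm1, %xmm0, %xmm0
        ret
        .globl  rot15_avx2_demo
rot15_avx2_demo:                # AVX2: one per-lane 256-bit op
        vpalignr $15, %ymm1, %ymm0, %ymm0
        ret
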
/external/libjpeg-turbo/simd/x86_64/
jdsample-avx2.asm
376 vpalignr ymm0, ymm0, ymm7, 2 ; ymm0=( 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 --)
383 vpalignr ymm6, ymm3, ymm6, 14 ; ymm6=(-- 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30)
389 vpalignr ymm2, ymm2, ymm3, 2 ; ymm2=(17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 --)
393 vpalignr ymm1, ymm7, ymm1, 14 ; ymm1=(-- 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14)
430 vpalignr ymm7, ymm7, ymm6, 2 ; ymm7=( 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 --)
437 vpalignr ymm2, ymm4, ymm2, 14 ; ymm2=(-- 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30)
443 vpalignr ymm5, ymm5, ymm4, 2 ; ymm5=(17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 --)
447 vpalignr ymm1, ymm6, ymm1, 14 ; ymm1=(-- 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14)
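
In this upsampler the two-source form slides a register of 16-bit samples by one position, pulling the boundary sample in from a neighboring register; note that the NASM files in these sections write the operands as dst, src1, src2, imm rather than AT&T's $imm, src2, src1, dst. Because the shift is per 128-bit lane, the surrounding code stages the lane-crossing neighbors in the other source beforehand. An AT&T-syntax sketch of the idiom (hypothetical label):

        .text
        .globl  shift_in_sample_demo
shift_in_sample_demo:
        # Per lane: drop the first 16-bit sample of %ymm2 and
        # append the first sample of %ymm1 at the top.
        vpalignr $2, %ymm2, %ymm1, %ymm0
        ret
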
/external/libjpeg-turbo/simd/i386/
jdsample-avx2.asm
473 vpalignr ymm7, ymm7, ymm6, 2 ; ymm7=( 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 --)
480 vpalignr ymm2, ymm4, ymm2, 14 ; ymm2=(-- 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30)
486 vpalignr ymm5, ymm5, ymm4, 2 ; ymm5=(17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 --)
490 vpalignr ymm1, ymm6, ymm1, 14 ; ymm1=(-- 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14)
/external/boringssl/linux-x86_64/crypto/fipsmodule/
aesni-gcm-x86_64.S
202 vpalignr $8,%xmm4,%xmm4,%xmm0
287 vpalignr $8,%xmm4,%xmm4,%xmm8
749 vpalignr $8,%xmm8,%xmm8,%xmm14
774 vpalignr $8,%xmm8,%xmm8,%xmm14
809 vpalignr $8,%xmm8,%xmm8,%xmm2
813 vpalignr $8,%xmm8,%xmm8,%xmm2
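
Here vpalignr $8 with both sources equal is simply a swap of the two 64-bit halves, the cheap qword rotation the GHASH reduction applies to %xmm4 and %xmm8 above. As a sketch (hypothetical label):

        .text
        .globl  swap_qwords_demo
swap_qwords_demo:
        # $8 with identical sources rotates by 8 bytes:
        # xmm0 = xmm0[8..15],xmm0[0..7].
        vpalignr $8, %xmm0, %xmm0, %xmm0
        ret
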
sha512-x86_64.S
1911 vpalignr $8,%xmm0,%xmm1,%xmm8
1914 vpalignr $8,%xmm4,%xmm5,%xmm11
1988 vpalignr $8,%xmm1,%xmm2,%xmm8
1991 vpalignr $8,%xmm5,%xmm6,%xmm11
2065 vpalignr $8,%xmm2,%xmm3,%xmm8
2068 vpalignr $8,%xmm6,%xmm7,%xmm11
2142 vpalignr $8,%xmm3,%xmm4,%xmm8
2145 vpalignr $8,%xmm7,%xmm0,%xmm11
2219 vpalignr $8,%xmm4,%xmm5,%xmm8
2222 vpalignr $8,%xmm0,%xmm1,%xmm11
[all …]
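
In the SHA-512 message schedule the two-source form of vpalignr $8 builds the qword-offset windows that sigma0 and sigma1 consume: if %xmm0 holds w[t-16],w[t-15] and %xmm1 holds w[t-14],w[t-13], the result is the pair w[t-15],w[t-14]. A sketch (hypothetical label; the register roles are assumed from the schedule's structure, not read from the source):

        .text
        .globl  schedule_window_demo
schedule_window_demo:
        # xmm8 = xmm0[8..15],xmm1[0..7]: the 128-bit window that
        # starts one qword into the lower register pair.
        vpalignr $8, %xmm0, %xmm1, %xmm8
        ret
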
/external/boringssl/mac-x86_64/crypto/fipsmodule/
aesni-gcm-x86_64.S
202 vpalignr $8,%xmm4,%xmm4,%xmm0
287 vpalignr $8,%xmm4,%xmm4,%xmm8
748 vpalignr $8,%xmm8,%xmm8,%xmm14
773 vpalignr $8,%xmm8,%xmm8,%xmm14
808 vpalignr $8,%xmm8,%xmm8,%xmm2
812 vpalignr $8,%xmm8,%xmm8,%xmm2
sha512-x86_64.S
1910 vpalignr $8,%xmm0,%xmm1,%xmm8
1913 vpalignr $8,%xmm4,%xmm5,%xmm11
1987 vpalignr $8,%xmm1,%xmm2,%xmm8
1990 vpalignr $8,%xmm5,%xmm6,%xmm11
2064 vpalignr $8,%xmm2,%xmm3,%xmm8
2067 vpalignr $8,%xmm6,%xmm7,%xmm11
2141 vpalignr $8,%xmm3,%xmm4,%xmm8
2144 vpalignr $8,%xmm7,%xmm0,%xmm11
2218 vpalignr $8,%xmm4,%xmm5,%xmm8
2221 vpalignr $8,%xmm0,%xmm1,%xmm11
[all …]
/external/boringssl/win-x86_64/crypto/fipsmodule/
sha512-x86_64.asm
1938 vpalignr xmm8,xmm1,xmm0,8
1941 vpalignr xmm11,xmm5,xmm4,8
2015 vpalignr xmm8,xmm2,xmm1,8
2018 vpalignr xmm11,xmm6,xmm5,8
2092 vpalignr xmm8,xmm3,xmm2,8
2095 vpalignr xmm11,xmm7,xmm6,8
2169 vpalignr xmm8,xmm4,xmm3,8
2172 vpalignr xmm11,xmm0,xmm7,8
2246 vpalignr xmm8,xmm5,xmm4,8
2249 vpalignr xmm11,xmm1,xmm0,8
[all …]
aesni-gcm-x86_64.asm
201 vpalignr xmm0,xmm4,xmm4,8
286 vpalignr xmm8,xmm4,xmm4,8
805 vpalignr xmm14,xmm8,xmm8,8
830 vpalignr xmm14,xmm8,xmm8,8
865 vpalignr xmm2,xmm8,xmm8,8
869 vpalignr xmm2,xmm8,xmm8,8
