Home
last modified time | relevance | path

Searched refs:xmm10 (Results 1 – 25 of 202) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9

/external/capstone/suite/MC/X86/
x86_64-fma3-encoding.s.cs:2 0xc4,0x42,0xa9,0x98,0xdc = vfmadd132pd %xmm12, %xmm10, %xmm11
3 0xc4,0x62,0xa9,0x98,0x18 = vfmadd132pd (%rax), %xmm10, %xmm11
4 0xc4,0x42,0x29,0x98,0xdc = vfmadd132ps %xmm12, %xmm10, %xmm11
5 0xc4,0x62,0x29,0x98,0x18 = vfmadd132ps (%rax), %xmm10, %xmm11
6 0xc4,0x42,0xa9,0xa8,0xdc = vfmadd213pd %xmm12, %xmm10, %xmm11
7 0xc4,0x62,0xa9,0xa8,0x18 = vfmadd213pd (%rax), %xmm10, %xmm11
8 0xc4,0x42,0x29,0xa8,0xdc = vfmadd213ps %xmm12, %xmm10, %xmm11
9 0xc4,0x62,0x29,0xa8,0x18 = vfmadd213ps (%rax), %xmm10, %xmm11
10 0xc4,0x42,0xa9,0xb8,0xdc = vfmadd231pd %xmm12, %xmm10, %xmm11
11 0xc4,0x62,0xa9,0xb8,0x18 = vfmadd231pd (%rax), %xmm10, %xmm11
[all …]
x86_64-avx-encoding.s.cs:2 0xc4,0x41,0x32,0x58,0xd0 = vaddss %xmm8, %xmm9, %xmm10
3 0xc4,0x41,0x32,0x59,0xd0 = vmulss %xmm8, %xmm9, %xmm10
4 0xc4,0x41,0x32,0x5c,0xd0 = vsubss %xmm8, %xmm9, %xmm10
5 0xc4,0x41,0x32,0x5e,0xd0 = vdivss %xmm8, %xmm9, %xmm10
6 0xc4,0x41,0x33,0x58,0xd0 = vaddsd %xmm8, %xmm9, %xmm10
7 0xc4,0x41,0x33,0x59,0xd0 = vmulsd %xmm8, %xmm9, %xmm10
8 0xc4,0x41,0x33,0x5c,0xd0 = vsubsd %xmm8, %xmm9, %xmm10
9 0xc4,0x41,0x33,0x5e,0xd0 = vdivsd %xmm8, %xmm9, %xmm10
10 0xc5,0x2a,0x58,0x5c,0xd9,0xfc = vaddss -4(%rcx, %rbx, 8), %xmm10, %xmm11
11 0xc5,0x2a,0x5c,0x5c,0xd9,0xfc = vsubss -4(%rcx, %rbx, 8), %xmm10, %xmm11
[all …]
x86_64-avx-clmul-encoding.s.cs:2 0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulhqhqdq %xmm12, %xmm10, %xmm11
3 0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulhqhqdq (%rax), %xmm10, %xmm13
4 0xc4,0x43,0x29,0x44,0xdc,0x01 = vpclmulhqlqdq %xmm12, %xmm10, %xmm11
5 0xc4,0x63,0x29,0x44,0x28,0x01 = vpclmulhqlqdq (%rax), %xmm10, %xmm13
6 0xc4,0x43,0x29,0x44,0xdc,0x10 = vpclmullqhqdq %xmm12, %xmm10, %xmm11
7 0xc4,0x63,0x29,0x44,0x28,0x10 = vpclmullqhqdq (%rax), %xmm10, %xmm13
8 0xc4,0x43,0x29,0x44,0xdc,0x00 = vpclmullqlqdq %xmm12, %xmm10, %xmm11
9 0xc4,0x63,0x29,0x44,0x28,0x00 = vpclmullqlqdq (%rax), %xmm10, %xmm13
10 0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulqdq $17, %xmm12, %xmm10, %xmm11
11 0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulqdq $17, (%rax), %xmm10, %xmm13
/external/swiftshader/third_party/LLVM/test/MC/X86/
x86_64-fma3-encoding.s:5 vfmadd132pd %xmm12, %xmm10, %xmm11
9 vfmadd132pd (%rax), %xmm10, %xmm11
13 vfmadd132ps %xmm12, %xmm10, %xmm11
17 vfmadd132ps (%rax), %xmm10, %xmm11
21 vfmadd213pd %xmm12, %xmm10, %xmm11
25 vfmadd213pd (%rax), %xmm10, %xmm11
29 vfmadd213ps %xmm12, %xmm10, %xmm11
33 vfmadd213ps (%rax), %xmm10, %xmm11
37 vfmadd231pd %xmm12, %xmm10, %xmm11
41 vfmadd231pd (%rax), %xmm10, %xmm11
[all …]
x86_64-avx-encoding.s:5 vaddss %xmm8, %xmm9, %xmm10
9 vmulss %xmm8, %xmm9, %xmm10
13 vsubss %xmm8, %xmm9, %xmm10
17 vdivss %xmm8, %xmm9, %xmm10
21 vaddsd %xmm8, %xmm9, %xmm10
25 vmulsd %xmm8, %xmm9, %xmm10
29 vsubsd %xmm8, %xmm9, %xmm10
33 vdivsd %xmm8, %xmm9, %xmm10
37 vaddss -4(%rcx,%rbx,8), %xmm10, %xmm11
41 vsubss -4(%rcx,%rbx,8), %xmm10, %xmm11
[all …]
x86_64-avx-clmul-encoding.s:5 vpclmulhqhqdq %xmm12, %xmm10, %xmm11
9 vpclmulhqhqdq (%rax), %xmm10, %xmm13
13 vpclmulhqlqdq %xmm12, %xmm10, %xmm11
17 vpclmulhqlqdq (%rax), %xmm10, %xmm13
21 vpclmullqhqdq %xmm12, %xmm10, %xmm11
25 vpclmullqhqdq (%rax), %xmm10, %xmm13
29 vpclmullqlqdq %xmm12, %xmm10, %xmm11
33 vpclmullqlqdq (%rax), %xmm10, %xmm13
37 vpclmulqdq $17, %xmm12, %xmm10, %xmm11
41 vpclmulqdq $17, (%rax), %xmm10, %xmm13
/external/llvm/test/MC/X86/
x86_64-fma3-encoding.s:5 vfmadd132pd %xmm12, %xmm10, %xmm11
9 vfmadd132pd (%rax), %xmm10, %xmm11
13 vfmadd132ps %xmm12, %xmm10, %xmm11
17 vfmadd132ps (%rax), %xmm10, %xmm11
21 vfmadd213pd %xmm12, %xmm10, %xmm11
25 vfmadd213pd (%rax), %xmm10, %xmm11
29 vfmadd213ps %xmm12, %xmm10, %xmm11
33 vfmadd213ps (%rax), %xmm10, %xmm11
37 vfmadd231pd %xmm12, %xmm10, %xmm11
41 vfmadd231pd (%rax), %xmm10, %xmm11
[all …]
x86_64-avx-encoding.s:5 vaddss %xmm8, %xmm9, %xmm10
9 vmulss %xmm8, %xmm9, %xmm10
13 vsubss %xmm8, %xmm9, %xmm10
17 vdivss %xmm8, %xmm9, %xmm10
21 vaddsd %xmm8, %xmm9, %xmm10
25 vmulsd %xmm8, %xmm9, %xmm10
29 vsubsd %xmm8, %xmm9, %xmm10
33 vdivsd %xmm8, %xmm9, %xmm10
37 vaddss -4(%rcx,%rbx,8), %xmm10, %xmm11
41 vsubss -4(%rcx,%rbx,8), %xmm10, %xmm11
[all …]
x86_64-avx-clmul-encoding.s:5 vpclmulhqhqdq %xmm12, %xmm10, %xmm11
9 vpclmulhqhqdq (%rax), %xmm10, %xmm13
13 vpclmulhqlqdq %xmm12, %xmm10, %xmm11
17 vpclmulhqlqdq (%rax), %xmm10, %xmm13
21 vpclmullqhqdq %xmm12, %xmm10, %xmm11
25 vpclmullqhqdq (%rax), %xmm10, %xmm13
29 vpclmullqlqdq %xmm12, %xmm10, %xmm11
33 vpclmullqlqdq (%rax), %xmm10, %xmm13
37 vpclmulqdq $17, %xmm12, %xmm10, %xmm11
41 vpclmulqdq $17, (%rax), %xmm10, %xmm13
intel-syntax-x86-64-avx.s:21 vgatherdps xmm10, xmmword ptr [r15 + 2*xmm9], xmm8
25 vgatherqps xmm10, qword ptr [r15 + 2*xmm9], xmm8
33 vgatherqps xmm10, xmmword ptr [r15 + 2*ymm9], xmm8
53 vpgatherdd xmm10, xmmword ptr [r15 + 2*xmm9], xmm8
57 vpgatherqd xmm10, qword ptr [r15 + 2*xmm9], xmm8
65 vpgatherqd xmm10, xmmword ptr [r15 + 2*ymm9], xmm8
/external/swiftshader/third_party/llvm-7.0/llvm/test/MC/X86/
x86_64-fma3-encoding.s:5 vfmadd132pd %xmm12, %xmm10, %xmm11
9 vfmadd132pd (%rax), %xmm10, %xmm11
13 vfmadd132ps %xmm12, %xmm10, %xmm11
17 vfmadd132ps (%rax), %xmm10, %xmm11
21 vfmadd213pd %xmm12, %xmm10, %xmm11
25 vfmadd213pd (%rax), %xmm10, %xmm11
29 vfmadd213ps %xmm12, %xmm10, %xmm11
33 vfmadd213ps (%rax), %xmm10, %xmm11
37 vfmadd231pd %xmm12, %xmm10, %xmm11
41 vfmadd231pd (%rax), %xmm10, %xmm11
[all …]
gfni-encoding.s:89 vgf2p8affineinvqb $7, %xmm2, %xmm10, %xmm1
93 vgf2p8affineqb $7, %xmm2, %xmm10, %xmm1
97 vgf2p8affineinvqb $7, (%rcx), %xmm10, %xmm1
101 vgf2p8affineinvqb $7, -4(%rsp), %xmm10, %xmm1
105 vgf2p8affineinvqb $7, 4(%rsp), %xmm10, %xmm1
109 vgf2p8affineinvqb $7, 268435456(%rcx,%r14,8), %xmm10, %xmm1
113 vgf2p8affineinvqb $7, -536870912(%rcx,%r14,8), %xmm10, %xmm1
117 vgf2p8affineinvqb $7, -536870910(%rcx,%r14,8), %xmm10, %xmm1
121 vgf2p8affineqb $7, (%rcx), %xmm10, %xmm1
125 vgf2p8affineqb $7, -4(%rsp), %xmm10, %xmm1
[all …]
x86_64-avx-encoding.s:5 vaddss %xmm8, %xmm9, %xmm10
9 vmulss %xmm8, %xmm9, %xmm10
13 vsubss %xmm8, %xmm9, %xmm10
17 vdivss %xmm8, %xmm9, %xmm10
21 vaddsd %xmm8, %xmm9, %xmm10
25 vmulsd %xmm8, %xmm9, %xmm10
29 vsubsd %xmm8, %xmm9, %xmm10
33 vdivsd %xmm8, %xmm9, %xmm10
37 vaddss -4(%rcx,%rbx,8), %xmm10, %xmm11
41 vsubss -4(%rcx,%rbx,8), %xmm10, %xmm11
[all …]
x86_64-avx-clmul-encoding.s:5 vpclmulhqhqdq %xmm12, %xmm10, %xmm11
9 vpclmulhqhqdq (%rax), %xmm10, %xmm13
13 vpclmulhqlqdq %xmm12, %xmm10, %xmm11
17 vpclmulhqlqdq (%rax), %xmm10, %xmm13
21 vpclmullqhqdq %xmm12, %xmm10, %xmm11
25 vpclmullqhqdq (%rax), %xmm10, %xmm13
29 vpclmullqlqdq %xmm12, %xmm10, %xmm11
33 vpclmullqlqdq (%rax), %xmm10, %xmm13
37 vpclmulqdq $17, %xmm12, %xmm10, %xmm11
41 vpclmulqdq $17, (%rax), %xmm10, %xmm13
/external/boringssl/linux-x86_64/crypto/fipsmodule/
aesni-gcm-x86_64.S:24 vpaddb %xmm2,%xmm1,%xmm10
25 vpaddb %xmm2,%xmm10,%xmm11
39 vpxor %xmm15,%xmm10,%xmm10
72 vaesenc %xmm2,%xmm10,%xmm10
96 vaesenc %xmm15,%xmm10,%xmm10
113 vaesenc %xmm15,%xmm10,%xmm10
131 vaesenc %xmm15,%xmm10,%xmm10
153 vaesenc %xmm15,%xmm10,%xmm10
175 vaesenc %xmm15,%xmm10,%xmm10
197 vaesenc %xmm15,%xmm10,%xmm10
[all …]
/external/boringssl/mac-x86_64/crypto/fipsmodule/
aesni-gcm-x86_64.S:24 vpaddb %xmm2,%xmm1,%xmm10
25 vpaddb %xmm2,%xmm10,%xmm11
39 vpxor %xmm15,%xmm10,%xmm10
72 vaesenc %xmm2,%xmm10,%xmm10
96 vaesenc %xmm15,%xmm10,%xmm10
113 vaesenc %xmm15,%xmm10,%xmm10
131 vaesenc %xmm15,%xmm10,%xmm10
153 vaesenc %xmm15,%xmm10,%xmm10
175 vaesenc %xmm15,%xmm10,%xmm10
197 vaesenc %xmm15,%xmm10,%xmm10
[all …]
/external/boringssl/win-x86_64/crypto/fipsmodule/
aesni-gcm-x86_64.asm:23 vpaddb xmm10,xmm1,xmm2
24 vpaddb xmm11,xmm10,xmm2
38 vpxor xmm10,xmm10,xmm15
71 vaesenc xmm10,xmm10,xmm2
95 vaesenc xmm10,xmm10,xmm15
112 vaesenc xmm10,xmm10,xmm15
130 vaesenc xmm10,xmm10,xmm15
152 vaesenc xmm10,xmm10,xmm15
174 vaesenc xmm10,xmm10,xmm15
196 vaesenc xmm10,xmm10,xmm15
[all …]
/external/boringssl/src/crypto/fipsmodule/modes/asm/
ghash-ssse3-x86_64.pl:113 movdqa %xmm10, 16(%rsp)
119 movdqa .Lreverse_bytes(%rip), %xmm10
123 pshufb %xmm10, %xmm0
214 pshufb %xmm10, %xmm2
228 movdqa 16(%rsp), %xmm10
256 movdqa %xmm10, 16(%rsp)
264 movdqa .Lreverse_bytes(%rip), %xmm10
272 pshufb %xmm10, %xmm0
279 pshufb %xmm10, %xmm1 # Reverse bytes.
312 pshufb %xmm10, %xmm0
[all …]
/external/boringssl/src/crypto/fipsmodule/aes/asm/
vpaes-x86_64.pl:149 movdqa %xmm10, %xmm3 # 3 : 1/i
152 movdqa %xmm10, %xmm4 # 4 : 1/j
155 movdqa %xmm10, %xmm2 # 2 : 1/iak
158 movdqa %xmm10, %xmm3 # 3 : 1/jak
197 ## %xmm10 in registers as these values are used several times in a row. The
302 movdqa %xmm10, %xmm3 # 3 : 1/i
303 movdqa %xmm10, %xmm11
308 movdqa %xmm10, %xmm4 # 4 : 1/j
309 movdqa %xmm10, %xmm12
314 movdqa %xmm10, %xmm2 # 2 : 1/iak
[all …]
/external/boringssl/linux-x86_64/crypto/cipher_extra/
aes128gcmsiv-x86_64.S:634 vpaddd %xmm13,%xmm9,%xmm10
635 vpaddd %xmm13,%xmm10,%xmm11
639 vpxor %xmm1,%xmm10,%xmm10
645 vaesenc %xmm1,%xmm10,%xmm10
651 vaesenc %xmm2,%xmm10,%xmm10
657 vaesenc %xmm1,%xmm10,%xmm10
663 vaesenc %xmm2,%xmm10,%xmm10
669 vaesenc %xmm1,%xmm10,%xmm10
675 vaesenc %xmm2,%xmm10,%xmm10
681 vaesenc %xmm1,%xmm10,%xmm10
[all …]
/external/boringssl/mac-x86_64/crypto/cipher_extra/
aes128gcmsiv-x86_64.S:634 vpaddd %xmm13,%xmm9,%xmm10
635 vpaddd %xmm13,%xmm10,%xmm11
639 vpxor %xmm1,%xmm10,%xmm10
645 vaesenc %xmm1,%xmm10,%xmm10
651 vaesenc %xmm2,%xmm10,%xmm10
657 vaesenc %xmm1,%xmm10,%xmm10
663 vaesenc %xmm2,%xmm10,%xmm10
669 vaesenc %xmm1,%xmm10,%xmm10
675 vaesenc %xmm2,%xmm10,%xmm10
681 vaesenc %xmm1,%xmm10,%xmm10
[all …]
/external/boringssl/win-x86_64/crypto/cipher_extra/
aes128gcmsiv-x86_64.asm:715 vpaddd xmm10,xmm9,xmm13
716 vpaddd xmm11,xmm10,xmm13
720 vpxor xmm10,xmm10,xmm1
726 vaesenc xmm10,xmm10,xmm1
732 vaesenc xmm10,xmm10,xmm2
738 vaesenc xmm10,xmm10,xmm1
744 vaesenc xmm10,xmm10,xmm2
750 vaesenc xmm10,xmm10,xmm1
756 vaesenc xmm10,xmm10,xmm2
762 vaesenc xmm10,xmm10,xmm1
[all …]
/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/X86/
stack-folding-fp-sse42.ll:14 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
22 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
30 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
38 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
49 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
57 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
68 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
77 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
86 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
100 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
[all …]
vector-trunc-packus.ll:285 ; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
287 ; SSE2-NEXT: pxor %xmm10, %xmm5
301 ; SSE2-NEXT: pxor %xmm10, %xmm0
314 ; SSE2-NEXT: pxor %xmm10, %xmm1
327 ; SSE2-NEXT: pxor %xmm10, %xmm1
340 ; SSE2-NEXT: pxor %xmm10, %xmm1
342 ; SSE2-NEXT: pcmpgtd %xmm10, %xmm3
344 ; SSE2-NEXT: pcmpeqd %xmm10, %xmm1
351 ; SSE2-NEXT: pxor %xmm10, %xmm1
353 ; SSE2-NEXT: pcmpgtd %xmm10, %xmm2
[all …]
stack-folding-fp-avx1.ll:14 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
22 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
30 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
38 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
46 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
54 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
66 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
74 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
86 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
95 …p", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12}…
[all …]

Pages: 1 2 3 4 5 6 7 8 9