
Searched refs:xmm6 (Results 1 – 25 of 198) sorted by relevance


/external/openssl/crypto/aes/asm/
bsaes-x86_64.S
27 pxor %xmm8,%xmm6
37 pxor %xmm6,%xmm5
41 pxor %xmm5,%xmm6
66 pxor %xmm6,%xmm4
70 pxor %xmm4,%xmm6
94 pxor %xmm6,%xmm2
98 pxor %xmm2,%xmm6
135 pxor 112(%rax),%xmm6
147 pxor %xmm6,%xmm2
148 pxor %xmm4,%xmm6
[all …]
vpaes-x86_64.pl
79 ## Preserves %xmm6 - %xmm8 so you get some local vectors
344 ## the high bits of %xmm6.
355 movdqa %xmm0, %xmm6 # save short part
357 movhlps %xmm4, %xmm6 # clobber low side with zeros
362 palignr \$8,%xmm6,%xmm0
380 ## %xmm6. The low side's rounds are the same as the
391 movdqa %xmm0, %xmm6 # save cur_lo in xmm6
402 movdqa %xmm6, %xmm7
445 pxor %xmm6, %xmm6
457 ## %xmm6: low side, d c 0 0
[all …]
aesni-x86.S
200 pxor %xmm0,%xmm6
257 pxor %xmm0,%xmm6
326 movdqu 64(%esi),%xmm6
341 movups %xmm6,64(%edi)
342 movdqu 64(%esi),%xmm6
357 movups %xmm6,64(%edi)
373 movups 64(%esi),%xmm6
380 movups %xmm6,64(%edi)
429 movdqu 64(%esi),%xmm6
444 movups %xmm6,64(%edi)
[all …]
/external/chromium_org/third_party/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S
26 pxor %xmm8,%xmm6
38 pxor %xmm6,%xmm5
42 pxor %xmm5,%xmm6
67 pxor %xmm6,%xmm4
71 pxor %xmm4,%xmm6
95 pxor %xmm6,%xmm2
99 pxor %xmm2,%xmm6
134 pxor 112(%rax),%xmm6
148 pxor %xmm6,%xmm2
149 pxor %xmm4,%xmm6
[all …]
/external/chromium_org/third_party/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S
26 pxor %xmm8,%xmm6
38 pxor %xmm6,%xmm5
42 pxor %xmm5,%xmm6
67 pxor %xmm6,%xmm4
71 pxor %xmm4,%xmm6
95 pxor %xmm6,%xmm2
99 pxor %xmm2,%xmm6
134 pxor 112(%rax),%xmm6
148 pxor %xmm6,%xmm2
149 pxor %xmm4,%xmm6
[all …]
/external/chromium_org/third_party/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm
26 pxor xmm6,xmm8
38 pxor xmm5,xmm6
42 pxor xmm6,xmm5
67 pxor xmm4,xmm6
71 pxor xmm6,xmm4
95 pxor xmm2,xmm6
99 pxor xmm6,xmm2
134 pxor xmm6,XMMWORD PTR[112+rax]
148 pxor xmm2,xmm6
149 pxor xmm6,xmm4
[all …]
/external/libvpx/libvpx/vp9/encoder/x86/
vp9_subpel_variance_impl_sse2.asm
34 pxor xmm6, xmm6 ; error accumulator
70 … paddw xmm6, xmm5 ; xmm6 += accumulated column differences
71 paddw xmm6, xmm4
88 punpcklwd xmm0, xmm6
89 punpckhwd xmm1, xmm6
95 movdqa xmm6, xmm7
96 punpckldq xmm6, xmm5
98 paddd xmm6, xmm7
104 movdqa xmm7, xmm6
110 paddd xmm6, xmm7
[all …]
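The "pxor xmm6, xmm6" / "paddw xmm6, ..." lines matched above are the usual SSE2 accumulator idiom: clear the register once, then add the per-column differences into it every iteration. A minimal C intrinsics sketch of that idiom follows; the function name and signature are hypothetical, not taken from vp9_subpel_variance_impl_sse2.asm.

#include <emmintrin.h>  /* SSE2 */
#include <stddef.h>
#include <stdint.h>

/* Clear an accumulator (pxor xmm6, xmm6), then fold in blocks of eight
   16-bit differences with paddw, as the matched lines do. */
static __m128i accumulate_diffs_epi16(const int16_t *diffs, size_t n_blocks)
{
    __m128i acc = _mm_setzero_si128();                       /* pxor xmm6, xmm6 */
    for (size_t i = 0; i < n_blocks; i++) {
        __m128i d = _mm_loadu_si128((const __m128i *)(diffs + 8 * i));
        acc = _mm_add_epi16(acc, d);                         /* paddw xmm6, ... */
    }
    return acc;  /* eight 16-bit partial sums, still to be reduced */
}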
vp9_variance_impl_sse2.asm
124 pxor xmm6, xmm6 ; clear xmm6 for accumulating sse
154 paddd xmm6, xmm1
155 paddd xmm6, xmm3
164 movdqa xmm1, xmm6
165 pxor xmm6, xmm6
168 punpcklwd xmm6, xmm7
173 psrad xmm6, 16
174 paddd xmm6, xmm5
180 movdqa xmm7, xmm6
183 punpckldq xmm6, xmm0
[all …]
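Besides the paddd accumulation, the "punpcklwd xmm6, xmm7" / "psrad xmm6, 16" pair in these matches is the standard SSE2 way to sign-extend 16-bit sums to 32 bits: interleave them into the high half of each dword against a zeroed register, then arithmetic-shift right by 16. A small illustrative sketch; the helper name is made up.

#include <emmintrin.h>  /* SSE2 */

/* Sign-extend the four low 16-bit lanes of sums16 to 32 bits using the
   zero-interleave + psrad trick from the matched lines. */
static __m128i sign_extend_lo_epi16(__m128i sums16)
{
    __m128i hi = _mm_unpacklo_epi16(_mm_setzero_si128(), sums16); /* punpcklwd */
    return _mm_srai_epi32(hi, 16);                                /* psrad, 16  */
}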
/external/chromium_org/third_party/libvpx/source/libvpx/vp8/common/x86/
variance_impl_sse2.asm
126 pxor xmm6, xmm6 ; clear xmm6 for accumulating sse
156 paddd xmm6, xmm1
157 paddd xmm6, xmm3
166 movdqa xmm1, xmm6
167 pxor xmm6, xmm6
170 punpcklwd xmm6, xmm7
175 psrad xmm6, 16
176 paddd xmm6, xmm5
182 movdqa xmm7, xmm6
185 punpckldq xmm6, xmm0
[all …]
loopfilter_sse2.asm
54 movdqa xmm6, xmm1 ; q2
58 psubusb xmm2, xmm6 ; q3-=q2
60 psubusb xmm4, xmm6 ; q1-=q2
61 psubusb xmm6, xmm3 ; q2-=q1
63 por xmm4, xmm6 ; abs(q2-q1)
80 movdqa xmm6, [rsi+2*rax] ; p1
84 movlps xmm6, [rsi + rcx] ; p1
88 movhps xmm6, [rdi + rcx]
91 movdqa [rsp+_p1], xmm6 ; store p1
95 movdqa xmm3, xmm6 ; p1
[all …]
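The psubusb/psubusb/por sequence in these loop-filter matches computes abs(q2-q1) on unsigned bytes without an absolute-difference instruction: each saturating subtraction clamps the negative direction to zero, and OR-ing the two results leaves |a-b|. A minimal sketch of the same trick; the function name is illustrative only.

#include <emmintrin.h>  /* SSE2 */

/* |a - b| per unsigned byte: max(a-b, 0) | max(b-a, 0). */
static __m128i abs_diff_epu8(__m128i a, __m128i b)
{
    return _mm_or_si128(_mm_subs_epu8(a, b),   /* psubusb a, b */
                        _mm_subs_epu8(b, a));  /* psubusb b, a, then por */
}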
loopfilter_block_sse2_x86_64.asm
171 %define scratch2 xmm6
313 %define scratch2 xmm6
390 movdqa xmm6, s14
391 movdqa xmm7, xmm6
392 punpcklbw xmm6, s15 ; e0 f0
396 punpcklwd xmm3, xmm6 ; c0 d0 e0 f0
397 punpckhwd xmm8, xmm6 ; c4 d4 e4 f4
399 movdqa xmm6, xmm5
401 punpckhwd xmm6, xmm7 ; cc dc ec fc
418 punpckldq xmm2, xmm6 ; 8c 9c ac bc cc dc ec fc
[all …]
idctllm_sse2.asm
188 movdqa xmm6, xmm2 ; a1
196 psubw xmm6, xmm3 ;3
204 punpcklwd xmm4, xmm6 ; 015 011 014 010 013 009 012 008
205 punpckhwd xmm5, xmm6 ; 115 111 114 110 113 109 112 108
212 movdqa xmm6, xmm7 ; 107 103 106 102 105 101 104 100
214 punpckhdq xmm6, xmm5 ; 115 111 107 103 114 110 106 102
222 punpckldq xmm1, xmm6 ; 114 110 014 010 106 102 006 002
223 punpckhdq xmm7, xmm6 ; 115 111 015 011 107 103 007 003
260 movdqa xmm6, xmm2 ; a1
268 psubw xmm6, xmm3 ;3
[all …]
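The punpcklwd/punpckhwd (and later punpckldq/punpckhdq) comments in the idct matches document a word-interleave stage of the coefficient transpose: two rows of eight 16-bit values become a "low" and a "high" interleaved pair. A hedged sketch of that one stage, with illustrative names.

#include <emmintrin.h>  /* SSE2 */

/* Interleave two rows of eight 16-bit coefficients, the first stage of
   the transpose sketched in the matched comments. */
static void interleave_rows_epi16(__m128i row_a, __m128i row_b,
                                  __m128i *lo, __m128i *hi)
{
    *lo = _mm_unpacklo_epi16(row_a, row_b);  /* punpcklwd: a0 b0 a1 b1 ... */
    *hi = _mm_unpackhi_epi16(row_a, row_b);  /* punpckhwd: a4 b4 a5 b5 ... */
}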
subpixel_ssse3.asm
63 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
90 pmaddubsw xmm2, xmm6
118 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
146 pmaddubsw xmm2, xmm6
204 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
229 pmaddubsw xmm2, xmm6
247 pmaddubsw xmm2, xmm6
311 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
334 pmaddubsw xmm2, xmm6
362 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
[all …]
/external/libvpx/libvpx/vp8/common/x86/
variance_impl_sse2.asm
126 pxor xmm6, xmm6 ; clear xmm6 for accumulating sse
156 paddd xmm6, xmm1
157 paddd xmm6, xmm3
166 movdqa xmm1, xmm6
167 pxor xmm6, xmm6
170 punpcklwd xmm6, xmm7
175 psrad xmm6, 16
176 paddd xmm6, xmm5
182 movdqa xmm7, xmm6
185 punpckldq xmm6, xmm0
[all …]
loopfilter_sse2.asm
54 movdqa xmm6, xmm1 ; q2
58 psubusb xmm2, xmm6 ; q3-=q2
60 psubusb xmm4, xmm6 ; q1-=q2
61 psubusb xmm6, xmm3 ; q2-=q1
63 por xmm4, xmm6 ; abs(q2-q1)
80 movdqa xmm6, [rsi+2*rax] ; p1
84 movlps xmm6, [rsi + rcx] ; p1
88 movhps xmm6, [rdi + rcx]
91 movdqa [rsp+_p1], xmm6 ; store p1
95 movdqa xmm3, xmm6 ; p1
[all …]
loopfilter_block_sse2.asm
171 %define scratch2 xmm6
313 %define scratch2 xmm6
390 movdqa xmm6, s14
391 movdqa xmm7, xmm6
392 punpcklbw xmm6, s15 ; e0 f0
396 punpcklwd xmm3, xmm6 ; c0 d0 e0 f0
397 punpckhwd xmm8, xmm6 ; c4 d4 e4 f4
399 movdqa xmm6, xmm5
401 punpckhwd xmm6, xmm7 ; cc dc ec fc
418 punpckldq xmm2, xmm6 ; 8c 9c ac bc cc dc ec fc
[all …]
idctllm_sse2.asm
188 movdqa xmm6, xmm2 ; a1
196 psubw xmm6, xmm3 ;3
204 punpcklwd xmm4, xmm6 ; 015 011 014 010 013 009 012 008
205 punpckhwd xmm5, xmm6 ; 115 111 114 110 113 109 112 108
212 movdqa xmm6, xmm7 ; 107 103 106 102 105 101 104 100
214 punpckhdq xmm6, xmm5 ; 115 111 107 103 114 110 106 102
222 punpckldq xmm1, xmm6 ; 114 110 014 010 106 102 006 002
223 punpckhdq xmm7, xmm6 ; 115 111 015 011 107 103 007 003
260 movdqa xmm6, xmm2 ; a1
268 psubw xmm6, xmm3 ;3
[all …]
/external/chromium_org/third_party/boringssl/src/crypto/aes/asm/
vpaes-x86_64.pl
79 ## Preserves %xmm6 - %xmm8 so you get some local vectors
343 ## the high bits of %xmm6.
354 movdqa %xmm0, %xmm6 # save short part
356 movhlps %xmm4, %xmm6 # clobber low side with zeros
361 palignr \$8,%xmm6,%xmm0
379 ## %xmm6. The low side's rounds are the same as the
390 movdqa %xmm0, %xmm6 # save cur_lo in xmm6
401 movdqa %xmm6, %xmm7
444 pxor %xmm6, %xmm6
456 ## %xmm6: low side, d c 0 0
[all …]
/external/llvm/test/MC/X86/
x86_64-xop-encoding.s
37 vphaddwq %xmm6, %xmm2
50 vphadduwq (%rcx,%rax), %xmm6
69 vphaddudq %xmm6, %xmm2
109 vphaddbw %xmm5, %xmm6
149 vfrczps %xmm6, %xmm5
191 vpshlq %xmm2, %xmm4, %xmm6
197 vpshlq %xmm5, (%rdx,%rcx), %xmm6
241 vpshaq %xmm6, (%rax,%rcx), %xmm5
268 vprotw (%rax), %xmm3, %xmm6
317 vprotd $43, (%rcx), %xmm6
[all …]
/external/chromium_org/third_party/libjpeg_turbo/simd/
jcclrss2-64.asm
298 movdqa xmm6,xmm1
300 punpckhwd xmm6,xmm3
302 movdqa xmm4,xmm6
304 pmaddwd xmm6,[rel PW_F0299_F0337] ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)
309 movdqa XMMWORD [wk(5)], xmm6 ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)
312 pxor xmm6,xmm6
314 punpckhwd xmm6,xmm5 ; xmm6=BOH
316 psrld xmm6,1 ; xmm6=BOH*FIX(0.500)
321 paddd xmm4,xmm6
330 movdqa xmm6,xmm0
[all …]
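The "pmaddwd ... PW_F0299_F0337" matches show how the colour-conversion code folds two fixed-point multiplies into one instruction: with R and G interleaved as 16-bit words, pmaddwd against paired weights yields R*FIX(0.299)+G*FIX(0.337) per 32-bit lane, as the comment states. A sketch of that pattern, assuming the usual 16-bit fixed-point scale (FIX(x) = round(x*65536), so roughly 19595 and 22086) and assuming R sits in the even word of each lane; both are assumptions, not read from jcclrss2-64.asm.

#include <emmintrin.h>  /* SSE2 */

/* R*FIX(0.299) + G*FIX(0.337) per dword lane via one pmaddwd.
   Weight values assume FIX(x) = round(x * 65536). */
static __m128i weighted_rg_sum(__m128i rg_interleaved_epi16)
{
    const __m128i w = _mm_set1_epi32((22086 << 16) | 19595);  /* [G weight | R weight] */
    return _mm_madd_epi16(rg_interleaved_epi16, w);           /* pmaddwd */
}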
jcclrss2.asm
312 movdqa xmm6,xmm1
314 punpckhwd xmm6,xmm3
316 movdqa xmm4,xmm6
318 pmaddwd xmm6,[GOTOFF(eax,PW_F0299_F0337)] ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)
323 movdqa XMMWORD [wk(5)], xmm6 ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)
326 pxor xmm6,xmm6
328 punpckhwd xmm6,xmm5 ; xmm6=BOH
330 psrld xmm6,1 ; xmm6=BOH*FIX(0.500)
335 paddd xmm4,xmm6
344 movdqa xmm6,xmm0
[all …]
/external/chromium_org/third_party/boringssl/mac-x86/crypto/aes/
aesni-x86.S
249 pxor %xmm0,%xmm6
304 pxor %xmm0,%xmm6
375 movdqu 64(%esi),%xmm6
390 movups %xmm6,64(%edi)
391 movdqu 64(%esi),%xmm6
406 movups %xmm6,64(%edi)
422 movups 64(%esi),%xmm6
429 movups %xmm6,64(%edi)
477 movdqu 64(%esi),%xmm6
492 movups %xmm6,64(%edi)
[all …]
/external/libvpx/libvpx/vp8/encoder/x86/
fwalsh_sse2.asm
55 pxor xmm6, xmm6
56 movq xmm6, xmm0
58 pcmpeqw xmm7, xmm6
86 pshufd xmm6, xmm1, 0x72 ; d13 d12 a13 a12
92 movdqa xmm1, xmm6
94 punpckhqdq xmm6, xmm7 ; c13 c12 d13 d12
100 paddd xmm1, xmm6 ; b23 b22 a23 a22
101 psubd xmm3, xmm6 ; c23 c22 d23 d22
110 pxor xmm6, xmm6
111 movdqa xmm7, xmm6
[all …]
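The paddd/psubd pairs in the fwalsh matches are the butterfly step of the 4x4 Walsh-Hadamard transform: each pair of rows is replaced by its sum and difference. A minimal sketch of one such butterfly; the names are illustrative, not from fwalsh_sse2.asm.

#include <emmintrin.h>  /* SSE2 */

/* One butterfly: (a, b) -> (a + b, a - b) on packed 32-bit lanes. */
static void butterfly_epi32(__m128i a, __m128i b, __m128i *sum, __m128i *diff)
{
    *sum  = _mm_add_epi32(a, b);  /* paddd */
    *diff = _mm_sub_epi32(a, b);  /* psubd */
}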
/external/chromium_org/third_party/libvpx/source/libvpx/vp8/encoder/x86/
fwalsh_sse2.asm
55 pxor xmm6, xmm6
56 movq xmm6, xmm0
58 pcmpeqw xmm7, xmm6
86 pshufd xmm6, xmm1, 0x72 ; d13 d12 a13 a12
92 movdqa xmm1, xmm6
94 punpckhqdq xmm6, xmm7 ; c13 c12 d13 d12
100 paddd xmm1, xmm6 ; b23 b22 a23 a22
101 psubd xmm3, xmm6 ; c23 c22 d23 d22
110 pxor xmm6, xmm6
111 movdqa xmm7, xmm6
[all …]
/external/chromium_org/third_party/boringssl/linux-x86/crypto/aes/
aesni-x86.S
266 pxor %xmm0,%xmm6
323 pxor %xmm0,%xmm6
396 movdqu 64(%esi),%xmm6
411 movups %xmm6,64(%edi)
412 movdqu 64(%esi),%xmm6
427 movups %xmm6,64(%edi)
443 movups 64(%esi),%xmm6
450 movups %xmm6,64(%edi)
498 movdqu 64(%esi),%xmm6
513 movups %xmm6,64(%edi)
[all …]
