Searched refs:xmm6 (Results 1 – 25 of 185) sorted by relevance

/external/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S
26 pxor %xmm8,%xmm6
38 pxor %xmm6,%xmm5
42 pxor %xmm5,%xmm6
67 pxor %xmm6,%xmm4
71 pxor %xmm4,%xmm6
95 pxor %xmm6,%xmm2
99 pxor %xmm2,%xmm6
134 pxor 112(%rax),%xmm6
148 pxor %xmm6,%xmm2
149 pxor %xmm4,%xmm6
[all …]
/external/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S
28 pxor %xmm8,%xmm6
40 pxor %xmm6,%xmm5
44 pxor %xmm5,%xmm6
69 pxor %xmm6,%xmm4
73 pxor %xmm4,%xmm6
97 pxor %xmm6,%xmm2
101 pxor %xmm2,%xmm6
136 pxor 112(%rax),%xmm6
150 pxor %xmm6,%xmm2
151 pxor %xmm4,%xmm6
[all …]
/external/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm
30 pxor xmm6,xmm8
42 pxor xmm5,xmm6
46 pxor xmm6,xmm5
71 pxor xmm4,xmm6
75 pxor xmm6,xmm4
99 pxor xmm2,xmm6
103 pxor xmm6,xmm2
138 pxor xmm6,XMMWORD[112+rax]
152 pxor xmm2,xmm6
153 pxor xmm6,xmm4
[all …]
/external/libvpx/libvpx/vpx_dsp/x86/
halfpix_variance_impl_sse2.asm
31 pxor xmm6, xmm6 ; error accumulator
67 … paddw xmm6, xmm5 ; xmm6 += accumulated column differences
68 paddw xmm6, xmm4
85 punpcklwd xmm0, xmm6
86 punpckhwd xmm1, xmm6
92 movdqa xmm6, xmm7
93 punpckldq xmm6, xmm5
95 paddd xmm6, xmm7
101 movdqa xmm7, xmm6
107 paddd xmm6, xmm7
[all …]
highbd_variance_impl_sse2.asm
66 pxor xmm6, xmm6 ; clear xmm6 for accumulating sse
91 paddd xmm6, xmm1
98 paddd xmm6, xmm3
105 paddd xmm6, xmm1
110 paddd xmm6, xmm3
129 movdqa xmm4, xmm6
130 punpckldq xmm6, xmm0
135 paddd xmm6, xmm4
141 movdqa xmm4, xmm6
147 paddd xmm6, xmm4
[all …]
vpx_subpixel_8t_sse2.asm
29 pshuflw xmm6, xmm7, 10101010b ;k6
35 punpcklqdq xmm6, xmm7
40 movdqa k6k7, xmm6
42 movq xmm6, rcx
43 pshufd xmm6, xmm6, 0
44 movdqa krd, xmm6
52 punpckldq xmm6, xmm7
57 punpcklbw xmm6, zero
62 pmullw xmm6, k6k7
66 paddsw xmm0, xmm6 ;sum
[all …]
/external/libvpx/libvpx/third_party/libyuv/source/
rotate_win.cc
58 movq xmm6, qword ptr [eax] in TransposeWx8_SSSE3()
61 punpcklbw xmm6, xmm7 in TransposeWx8_SSSE3()
63 movdqa xmm7, xmm6 in TransposeWx8_SSSE3()
72 punpcklwd xmm4, xmm6 in TransposeWx8_SSSE3()
74 movdqa xmm6, xmm4 in TransposeWx8_SSSE3()
76 palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
86 punpckldq xmm2, xmm6 in TransposeWx8_SSSE3()
87 movdqa xmm6, xmm2 in TransposeWx8_SSSE3()
88 palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
91 movq qword ptr [edx + esi], xmm6 in TransposeWx8_SSSE3()
[all …]
/external/boringssl/src/crypto/aes/asm/
vpaes-x86_64.pl
80 ## Preserves %xmm6 - %xmm8 so you get some local vectors
344 ## the high bits of %xmm6.
355 movdqa %xmm0, %xmm6 # save short part
357 movhlps %xmm4, %xmm6 # clobber low side with zeros
362 palignr \$8,%xmm6,%xmm0
380 ## %xmm6. The low side's rounds are the same as the
391 movdqa %xmm0, %xmm6 # save cur_lo in xmm6
402 movdqa %xmm6, %xmm7
445 pxor %xmm6, %xmm6
457 ## %xmm6: low side, d c 0 0
[all …]
/external/llvm/test/MC/X86/
x86_64-xop-encoding.s
37 vphaddwq %xmm6, %xmm2
50 vphadduwq (%rcx,%rax), %xmm6
69 vphaddudq %xmm6, %xmm2
109 vphaddbw %xmm5, %xmm6
149 vfrczps %xmm6, %xmm5
191 vpshlq %xmm2, %xmm4, %xmm6
197 vpshlq %xmm5, (%rdx,%rcx), %xmm6
241 vpshaq %xmm6, (%rax,%rcx), %xmm5
268 vprotw (%rax), %xmm3, %xmm6
317 vprotd $43, (%rcx), %xmm6
[all …]
/external/libvpx/libvpx/vp8/common/x86/
loopfilter_sse2.asm
54 movdqa xmm6, xmm1 ; q2
58 psubusb xmm2, xmm6 ; q3-=q2
60 psubusb xmm4, xmm6 ; q1-=q2
61 psubusb xmm6, xmm3 ; q2-=q1
63 por xmm4, xmm6 ; abs(q2-q1)
80 movdqa xmm6, [rsi+2*rax] ; p1
84 movlps xmm6, [rsi + rcx] ; p1
88 movhps xmm6, [rdi + rcx]
91 movdqa [rsp+_p1], xmm6 ; store p1
95 movdqa xmm3, xmm6 ; p1
[all …]
loopfilter_block_sse2_x86_64.asm
171 %define scratch2 xmm6
313 %define scratch2 xmm6
390 movdqa xmm6, s14
391 movdqa xmm7, xmm6
392 punpcklbw xmm6, s15 ; e0 f0
396 punpcklwd xmm3, xmm6 ; c0 d0 e0 f0
397 punpckhwd xmm8, xmm6 ; c4 d4 e4 f4
399 movdqa xmm6, xmm5
401 punpckhwd xmm6, xmm7 ; cc dc ec fc
418 punpckldq xmm2, xmm6 ; 8c 9c ac bc cc dc ec fc
[all …]
idctllm_sse2.asm
188 movdqa xmm6, xmm2 ; a1
196 psubw xmm6, xmm3 ;3
204 punpcklwd xmm4, xmm6 ; 015 011 014 010 013 009 012 008
205 punpckhwd xmm5, xmm6 ; 115 111 114 110 113 109 112 108
212 movdqa xmm6, xmm7 ; 107 103 106 102 105 101 104 100
214 punpckhdq xmm6, xmm5 ; 115 111 107 103 114 110 106 102
222 punpckldq xmm1, xmm6 ; 114 110 014 010 106 102 006 002
223 punpckhdq xmm7, xmm6 ; 115 111 015 011 107 103 007 003
260 movdqa xmm6, xmm2 ; a1
268 psubw xmm6, xmm3 ;3
[all …]
subpixel_ssse3.asm
63 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
90 pmaddubsw xmm2, xmm6
118 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
146 pmaddubsw xmm2, xmm6
204 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
229 pmaddubsw xmm2, xmm6
247 pmaddubsw xmm2, xmm6
311 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
334 pmaddubsw xmm2, xmm6
362 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
[all …]
subpixel_sse2.asm
71 movdqa xmm6, xmm1
85 …psrldq xmm6, 3 ; xx xx xx 0d 0c 0b 0a 09 08 07 06 05 04 03 02…
89 … punpcklbw xmm6, xmm0 ; xx08 xx07 xx06 xx05 xx04 xx03 xx02 xx01
92 pmullw xmm6, [rdx+48] ; x[ 1] * h[ 1] ; Tap 4
108 paddsw xmm4, xmm6
196 movdqa xmm6, xmm1
210 …psrldq xmm6, 3 ; xx xx xx 0d 0c 0b 0a 09 08 07 06 05 04 03 02…
214 … punpcklbw xmm6, xmm0 ; xx08 xx07 xx06 xx05 xx04 xx03 xx02 xx01
217 pmullw xmm6, [rdx+48] ; x[ 1] * h[ 1] ; Tap 4
232 paddsw xmm4, xmm6
[all …]
/external/libjpeg-turbo/simd/
jccolext-sse2-64.asm
298 movdqa xmm6,xmm1
300 punpckhwd xmm6,xmm3
302 movdqa xmm4,xmm6
304 pmaddwd xmm6,[rel PW_F0299_F0337] ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)
309 movdqa XMMWORD [wk(5)], xmm6 ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)
312 pxor xmm6,xmm6
314 punpckhwd xmm6,xmm5 ; xmm6=BOH
316 psrld xmm6,1 ; xmm6=BOH*FIX(0.500)
321 paddd xmm4,xmm6
330 movdqa xmm6,xmm0
[all …]
jccolext-sse2.asm
312 movdqa xmm6,xmm1
314 punpckhwd xmm6,xmm3
316 movdqa xmm4,xmm6
318 pmaddwd xmm6,[GOTOFF(eax,PW_F0299_F0337)] ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)
323 movdqa XMMWORD [wk(5)], xmm6 ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)
326 pxor xmm6,xmm6
328 punpckhwd xmm6,xmm5 ; xmm6=BOH
330 psrld xmm6,1 ; xmm6=BOH*FIX(0.500)
335 paddd xmm4,xmm6
344 movdqa xmm6,xmm0
[all …]
jcgryext-sse2-64.asm
281 movdqa xmm6,xmm1
283 punpckhwd xmm6,xmm3
285 pmaddwd xmm6,[rel PW_F0299_F0337] ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)
287 movdqa xmm7, xmm6 ; xmm7=ROH*FIX(0.299)+GOH*FIX(0.337)
289 movdqa xmm6,xmm0
291 punpckhwd xmm6,xmm2
293 pmaddwd xmm6,[rel PW_F0299_F0337] ; xmm6=REH*FIX(0.299)+GEH*FIX(0.337)
296 movdqa XMMWORD [wk(1)], xmm6 ; wk(1)=REH*FIX(0.299)+GEH*FIX(0.337)
299 movdqa xmm6, xmm4 ; xmm6=BE
317 movdqa xmm4,xmm6
[all …]
/external/llvm/test/CodeGen/X86/
vec_minmax_uint.ll
84 ; SSE2-NEXT: movdqa %xmm1, %xmm6
85 ; SSE2-NEXT: pxor %xmm4, %xmm6
86 ; SSE2-NEXT: movdqa %xmm6, %xmm7
89 ; SSE2-NEXT: pcmpeqd %xmm5, %xmm6
90 ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,3,3]
92 ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm7[1,1,3,3]
93 ; SSE2-NEXT: por %xmm5, %xmm6
108 ; SSE2-NEXT: pand %xmm6, %xmm1
109 ; SSE2-NEXT: pandn %xmm3, %xmm6
110 ; SSE2-NEXT: por %xmm6, %xmm1
[all …]
vselect-minmax.ll
626 ; SSE2-NEXT: movdqa %xmm1, %xmm6
627 ; SSE2-NEXT: pcmpgtb %xmm3, %xmm6
629 ; SSE2-NEXT: movdqa %xmm6, %xmm4
637 ; SSE2-NEXT: pandn %xmm1, %xmm6
639 ; SSE2-NEXT: por %xmm6, %xmm4
724 ; SSE2-NEXT: movdqa %xmm3, %xmm6
725 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm6
727 ; SSE2-NEXT: movdqa %xmm6, %xmm5
735 ; SSE2-NEXT: pandn %xmm1, %xmm6
737 ; SSE2-NEXT: por %xmm6, %xmm5
[all …]
/external/boringssl/linux-x86/crypto/aes/
aesni-x86.S
272 pxor %xmm0,%xmm6
327 pxor %xmm0,%xmm6
398 movdqu 64(%esi),%xmm6
413 movups %xmm6,64(%edi)
414 movdqu 64(%esi),%xmm6
429 movups %xmm6,64(%edi)
445 movups 64(%esi),%xmm6
452 movups %xmm6,64(%edi)
500 movdqu 64(%esi),%xmm6
515 movups %xmm6,64(%edi)
[all …]
/external/boringssl/mac-x86/crypto/aes/
aesni-x86.S
255 pxor %xmm0,%xmm6
308 pxor %xmm0,%xmm6
377 movdqu 64(%esi),%xmm6
392 movups %xmm6,64(%edi)
393 movdqu 64(%esi),%xmm6
408 movups %xmm6,64(%edi)
424 movups 64(%esi),%xmm6
431 movups %xmm6,64(%edi)
479 movdqu 64(%esi),%xmm6
494 movups %xmm6,64(%edi)
[all …]
/external/libvpx/libvpx/vp8/encoder/x86/
fwalsh_sse2.asm
55 pxor xmm6, xmm6
56 movq xmm6, xmm0
58 pcmpeqw xmm7, xmm6
86 pshufd xmm6, xmm1, 0x72 ; d13 d12 a13 a12
92 movdqa xmm1, xmm6
94 punpckhqdq xmm6, xmm7 ; c13 c12 d13 d12
100 paddd xmm1, xmm6 ; b23 b22 a23 a22
101 psubd xmm3, xmm6 ; c23 c22 d23 d22
110 pxor xmm6, xmm6
111 movdqa xmm7, xmm6
[all …]
/external/boringssl/win-x86/crypto/aes/
aesni-x86.asm
260 pxor xmm6,xmm0
312 pxor xmm6,xmm0
380 movdqu xmm6,[64+esi]
395 movups [64+edi],xmm6
396 movdqu xmm6,[64+esi]
411 movups [64+edi],xmm6
427 movups xmm6,[64+esi]
434 movups [64+edi],xmm6
482 movdqu xmm6,[64+esi]
497 movups [64+edi],xmm6
[all …]
/external/boringssl/mac-x86_64/crypto/sha/
sha256-x86_64.S
1799 movdqa 64(%rbp),%xmm6
1803 paddd %xmm2,%xmm6
1809 movdqa %xmm6,32(%rsp)
1839 movdqa %xmm4,%xmm6
1849 psrld $7,%xmm6
1858 pxor %xmm6,%xmm4
1863 psrld $11,%xmm6
1871 pxor %xmm6,%xmm4
1874 movdqa %xmm7,%xmm6
1887 psrlq $17,%xmm6
[all …]
/external/boringssl/linux-x86_64/crypto/sha/
sha256-x86_64.S
1800 movdqa 64(%rbp),%xmm6
1804 paddd %xmm2,%xmm6
1810 movdqa %xmm6,32(%rsp)
1840 movdqa %xmm4,%xmm6
1850 psrld $7,%xmm6
1859 pxor %xmm6,%xmm4
1864 psrld $11,%xmm6
1872 pxor %xmm6,%xmm4
1875 movdqa %xmm7,%xmm6
1888 psrlq $17,%xmm6
[all …]
