/external/swiftshader/third_party/llvm-7.0/llvm/test/MC/X86/ |
D | AVX-64.s |
    9    vaddpd 485498096, %xmm6, %xmm6
    29   vaddpd -64(%rdx,%rax,4), %xmm6, %xmm6
    33   vaddpd 64(%rdx,%rax,4), %xmm6, %xmm6
    57   vaddpd 64(%rdx,%rax), %xmm6, %xmm6
    73   vaddpd 64(%rdx), %xmm6, %xmm6
    89   vaddpd (%rdx), %xmm6, %xmm6
    105  vaddpd %xmm6, %xmm6, %xmm6
    121  vaddps 485498096, %xmm6, %xmm6
    141  vaddps -64(%rdx,%rax,4), %xmm6, %xmm6
    145  vaddps 64(%rdx,%rax,4), %xmm6, %xmm6
    [all …]
|
D | FMA-64.s |
    9    vfmadd132pd 485498096, %xmm6, %xmm6
    29   vfmadd132pd -64(%rdx,%rax,4), %xmm6, %xmm6
    33   vfmadd132pd 64(%rdx,%rax,4), %xmm6, %xmm6
    57   vfmadd132pd 64(%rdx,%rax), %xmm6, %xmm6
    73   vfmadd132pd 64(%rdx), %xmm6, %xmm6
    89   vfmadd132pd (%rdx), %xmm6, %xmm6
    105  vfmadd132pd %xmm6, %xmm6, %xmm6
    121  vfmadd132ps 485498096, %xmm6, %xmm6
    141  vfmadd132ps -64(%rdx,%rax,4), %xmm6, %xmm6
    145  vfmadd132ps 64(%rdx,%rax,4), %xmm6, %xmm6
    [all …]
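For orientation (not part of the test file): the vfmadd132pd/vfmadd132ps forms round-tripped above are what compilers emit for the packed fused multiply-add intrinsics. A minimal C sketch of that mapping:

    #include <immintrin.h>  /* FMA */

    /* a*b + c with a single rounding step -- the operation behind the
       vfmadd132pd encodings exercised by FMA-64.s. */
    static inline __m128d fma_pd(__m128d a, __m128d b, __m128d c)
    {
        return _mm_fmadd_pd(a, b, c);
    }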
|
D | AVXAES-64.s |
    9    vaesdec 485498096, %xmm6, %xmm6
    21   vaesdec 64(%rdx,%rax,4), %xmm6, %xmm6
    25   vaesdec -64(%rdx,%rax,4), %xmm6, %xmm6
    33   vaesdec 64(%rdx,%rax), %xmm6, %xmm6
    41   vaesdec 64(%rdx), %xmm6, %xmm6
    49   vaesdeclast 485498096, %xmm6, %xmm6
    61   vaesdeclast 64(%rdx,%rax,4), %xmm6, %xmm6
    65   vaesdeclast -64(%rdx,%rax,4), %xmm6, %xmm6
    73   vaesdeclast 64(%rdx,%rax), %xmm6, %xmm6
    81   vaesdeclast 64(%rdx), %xmm6, %xmm6
    [all …]
|
D | SHA-64.s |
    9    sha1msg1 485498096, %xmm6
    21   sha1msg1 64(%rdx,%rax,4), %xmm6
    25   sha1msg1 -64(%rdx,%rax,4), %xmm6
    33   sha1msg1 64(%rdx,%rax), %xmm6
    41   sha1msg1 64(%rdx), %xmm6
    49   sha1msg1 (%rdx), %xmm6
    57   sha1msg1 %xmm6, %xmm6
    65   sha1msg2 485498096, %xmm6
    77   sha1msg2 64(%rdx,%rax,4), %xmm6
    81   sha1msg2 -64(%rdx,%rax,4), %xmm6
    [all …]
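For orientation: sha1msg1/sha1msg2 are the SHA-extension message-schedule instructions; the sketch below only shows how the two instructions surface as C intrinsics (the helper name is illustrative, not part of the test):

    #include <immintrin.h>  /* SHA extensions */

    /* Chain the two message-schedule instructions over three blocks of four
       message dwords: sha1msg1 then sha1msg2. */
    static inline __m128i sha1_schedule_step(__m128i w0, __m128i w1, __m128i w2)
    {
        __m128i t = _mm_sha1msg1_epu32(w0, w1); /* sha1msg1 */
        return _mm_sha1msg2_epu32(t, w2);       /* sha1msg2 */
    }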
|
D | AES-64.s |
    9    aesdec 485498096, %xmm6
    21   aesdec 64(%rdx,%rax,4), %xmm6
    25   aesdec -64(%rdx,%rax,4), %xmm6
    33   aesdec 64(%rdx,%rax), %xmm6
    41   aesdec 64(%rdx), %xmm6
    49   aesdeclast 485498096, %xmm6
    61   aesdeclast 64(%rdx,%rax,4), %xmm6
    65   aesdeclast -64(%rdx,%rax,4), %xmm6
    73   aesdeclast 64(%rdx,%rax), %xmm6
    81   aesdeclast 64(%rdx), %xmm6
    [all …]
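For orientation: aesdec/aesdeclast perform one middle round and the final round of AES decryption; a minimal sketch of the intrinsic forms (helper name is illustrative):

    #include <wmmintrin.h>  /* AES-NI */

    /* One middle decryption round followed by the last round, each mixing
       in its round key. */
    static inline __m128i aes_dec_two_rounds(__m128i state, __m128i rk_mid,
                                             __m128i rk_last)
    {
        state = _mm_aesdec_si128(state, rk_mid);      /* aesdec */
        return _mm_aesdeclast_si128(state, rk_last);  /* aesdeclast */
    }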
|
/external/flac/libFLAC/ |
D | lpc_intrin_sse2.c |
    422  __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7; in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2() local
    447  xmm6 = _mm_loadl_epi64((const __m128i*)(data+i-10)); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    448  xmm6 = _mm_shuffle_epi32(xmm6, _MM_SHUFFLE(2,0,3,1)); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    449  xmm6 = _mm_mul_epu32(xmm6, xmm4); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    450  xmm7 = _mm_add_epi32(xmm7, xmm6); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    454  xmm6 = _mm_loadl_epi64((const __m128i*)(data+i-8)); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    455  xmm6 = _mm_shuffle_epi32(xmm6, _MM_SHUFFLE(2,0,3,1)); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    456  xmm6 = _mm_mul_epu32(xmm6, xmm3); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    457  xmm7 = _mm_add_epi32(xmm7, xmm6); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    461  xmm6 = _mm_loadl_epi64((const __m128i*)(data+i-6)); in FLAC__lpc_compute_residual_from_qlp_coefficients_intrin_sse2()
    [all …]
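The hits above are the SSE2 multiply-accumulate core of FLAC's residual computation: load two adjacent samples into the low quadword, shuffle them into the even dword lanes, multiply by two packed coefficients with _mm_mul_epu32, and add into the xmm7 accumulator. A minimal sketch of that pattern (helper name and signature are illustrative, not the FLAC API):

    #include <emmintrin.h>  /* SSE2 */
    #include <stdint.h>

    /* Multiply two 32-bit samples by two packed coefficients and accumulate
       the low 32 bits of each product, mirroring the xmm6/xmm7 pattern in
       lpc_intrin_sse2.c. */
    static inline __m128i mac_2x32(__m128i acc, const int32_t *samples,
                                   __m128i coefs)
    {
        /* {s0, s1, 0, 0} */
        __m128i t = _mm_loadl_epi64((const __m128i *)samples);
        /* Rearrange to {s1, 0, s0, 0} so both samples sit in the even dword
           lanes that _mm_mul_epu32 reads. */
        t = _mm_shuffle_epi32(t, _MM_SHUFFLE(2, 0, 3, 1));
        /* Two 32x32->64 multiplies; the 32-bit add below keeps only the low
           halves of the products. */
        t = _mm_mul_epu32(t, coefs);
        return _mm_add_epi32(acc, t);
    }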
|
D | lpc_intrin_sse41.c |
    69   __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7; in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41() local
    94   xmm6 = _mm_loadl_epi64((const __m128i*)(data+i-10)); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    95   xmm6 = _mm_shuffle_epi32(xmm6, _MM_SHUFFLE(2,0,3,1)); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    96   xmm6 = _mm_mul_epi32(xmm6, xmm4); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    97   xmm7 = _mm_add_epi64(xmm7, xmm6); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    101  xmm6 = _mm_loadl_epi64((const __m128i*)(data+i-8)); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    102  xmm6 = _mm_shuffle_epi32(xmm6, _MM_SHUFFLE(2,0,3,1)); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    103  xmm6 = _mm_mul_epi32(xmm6, xmm3); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    104  xmm7 = _mm_add_epi64(xmm7, xmm6); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    108  xmm6 = _mm_loadl_epi64((const __m128i*)(data+i-6)); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
    [all …]
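The "wide" SSE4.1 variant follows the same pattern but uses the signed _mm_mul_epi32 and keeps full 64-bit products via _mm_add_epi64. A matching sketch (again with an illustrative helper name):

    #include <smmintrin.h>  /* SSE4.1 */
    #include <stdint.h>

    /* Same shuffle-and-multiply step as the SSE2 path, but signed and with
       64-bit accumulation, as in lpc_intrin_sse41.c. */
    static inline __m128i mac_2x32_wide(__m128i acc, const int32_t *samples,
                                        __m128i coefs)
    {
        __m128i t = _mm_loadl_epi64((const __m128i *)samples);
        t = _mm_shuffle_epi32(t, _MM_SHUFFLE(2, 0, 3, 1));
        t = _mm_mul_epi32(t, coefs);   /* two signed 32x32->64 products */
        return _mm_add_epi64(acc, t);  /* 64-bit accumulation this time */
    }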
|
/external/boringssl/linux-x86_64/crypto/cipher_extra/ |
D | aes128gcmsiv-x86_64.S |
    156  vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6
    157  vpxor %xmm6,%xmm5,%xmm5
    171  vpclmulqdq $0x00,(%rdi,%r11,1),%xmm0,%xmm6
    172  vpxor %xmm6,%xmm3,%xmm3
    173  vpclmulqdq $0x11,(%rdi,%r11,1),%xmm0,%xmm6
    174  vpxor %xmm6,%xmm4,%xmm4
    175  vpclmulqdq $0x01,(%rdi,%r11,1),%xmm0,%xmm6
    176  vpxor %xmm6,%xmm5,%xmm5
    177  vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6
    178  vpxor %xmm6,%xmm5,%xmm5
    [all …]
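The repeated vpclmulqdq/vpxor pairs are a schoolbook 128x128 carry-less multiply accumulated into low, middle, and high partial sums, with %xmm6 serving as the scratch product register. A hedged intrinsics sketch of that selector pattern (not the BoringSSL routine itself; the final reduction is omitted):

    #include <wmmintrin.h>  /* PCLMULQDQ */

    /* Accumulate one 128x128 carry-less product of a and h into lo/mid/hi
       using the same 0x00/0x11/0x01/0x10 selectors as the assembly. */
    static inline void clmul_accumulate(__m128i a, __m128i h, __m128i *lo,
                                        __m128i *mid, __m128i *hi)
    {
        *lo  = _mm_xor_si128(*lo,  _mm_clmulepi64_si128(a, h, 0x00)); /* a.lo * h.lo */
        *hi  = _mm_xor_si128(*hi,  _mm_clmulepi64_si128(a, h, 0x11)); /* a.hi * h.hi */
        *mid = _mm_xor_si128(*mid, _mm_clmulepi64_si128(a, h, 0x01)); /* a.hi * h.lo */
        *mid = _mm_xor_si128(*mid, _mm_clmulepi64_si128(a, h, 0x10)); /* a.lo * h.hi */
    }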
|
/external/boringssl/mac-x86_64/crypto/cipher_extra/ |
D | aes128gcmsiv-x86_64.S |
    156  vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6
    157  vpxor %xmm6,%xmm5,%xmm5
    171  vpclmulqdq $0x00,(%rdi,%r11,1),%xmm0,%xmm6
    172  vpxor %xmm6,%xmm3,%xmm3
    173  vpclmulqdq $0x11,(%rdi,%r11,1),%xmm0,%xmm6
    174  vpxor %xmm6,%xmm4,%xmm4
    175  vpclmulqdq $0x01,(%rdi,%r11,1),%xmm0,%xmm6
    176  vpxor %xmm6,%xmm5,%xmm5
    177  vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6
    178  vpxor %xmm6,%xmm5,%xmm5
    [all …]
|
/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/X86/ |
D | vector-interleave.ll |
    29  ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xm…
    30  ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xm…
    31  ; SSE-NEXT: movdqa %xmm4, %xmm6
    32  ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
    47  ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xm…
    48  ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xm…
    72  … AVX1-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],…
    73  ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],…
    74  ; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm7 = xmm4[2],xmm6[2],xmm4[3],xmm6[3]
    75  ; AVX1-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1]
    [all …]
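The punpcklwd/punpckhwd pairs checked here interleave the 16-bit lanes of two vectors; a small intrinsics equivalent (illustrative helper, not the test itself):

    #include <emmintrin.h>  /* SSE2 */

    /* lo = a0,b0,a1,b1,a2,b2,a3,b3   hi = a4,b4,a5,b5,a6,b6,a7,b7 */
    static inline void interleave_epi16(__m128i a, __m128i b,
                                        __m128i *lo, __m128i *hi)
    {
        *lo = _mm_unpacklo_epi16(a, b); /* punpcklwd */
        *hi = _mm_unpackhi_epi16(a, b); /* punpckhwd */
    }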
|
D | vector-bitreverse.ll |
    564  ; SSE2-NEXT: movdqa {{.*#+}} xmm6 = [85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85]
    565  ; SSE2-NEXT: pand %xmm6, %xmm0
    586  ; SSE2-NEXT: pand %xmm6, %xmm1
    597  ; SSSE3-NEXT: movdqa %xmm5, %xmm6
    598  ; SSSE3-NEXT: pshufb %xmm2, %xmm6
    604  ; SSSE3-NEXT: por %xmm6, %xmm3
    700  ; SSE2-NEXT: movdqa {{.*#+}} xmm6 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240…
    701  ; SSE2-NEXT: pand %xmm6, %xmm3
    702  ; SSE2-NEXT: pand %xmm6, %xmm0
    739  ; SSE2-NEXT: pand %xmm6, %xmm4
    [all …]
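The 85 (0x55) and 240 (0xF0) vectors are the single-bit and nibble masks of the mask-and-shift byte bit-reversal used by the SSE2 lowering (the SSSE3 path uses pshufb nibble lookups instead). A self-contained sketch of the mask-and-shift technique, written here with the complementary 0x0F mask; this is not the generated code itself:

    #include <emmintrin.h>  /* SSE2 */

    /* Reverse the bit order inside every byte of v. */
    static inline __m128i bitreverse_epi8(__m128i v)
    {
        const __m128i m4 = _mm_set1_epi8(0x0F);
        const __m128i m2 = _mm_set1_epi8(0x33);
        const __m128i m1 = _mm_set1_epi8(0x55);
        /* swap nibbles */
        v = _mm_or_si128(_mm_slli_epi16(_mm_and_si128(v, m4), 4),
                         _mm_and_si128(_mm_srli_epi16(v, 4), m4));
        /* swap bit pairs */
        v = _mm_or_si128(_mm_slli_epi16(_mm_and_si128(v, m2), 2),
                         _mm_and_si128(_mm_srli_epi16(v, 2), m2));
        /* swap adjacent bits (add-to-self = shift left by 1 within each byte) */
        v = _mm_or_si128(_mm_add_epi8(_mm_and_si128(v, m1),
                                      _mm_and_si128(v, m1)),
                         _mm_and_si128(_mm_srli_epi16(v, 1), m1));
        return v;
    }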
|
D | vector-trunc-ssat.ll |
    25  ; SSE2-NEXT: movdqa %xmm5, %xmm6
    26  ; SSE2-NEXT: pcmpgtd %xmm3, %xmm6
    27  ; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
    31  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm6[1,1,3,3]
    40  ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm4[0,0,2,2]
    43  ; SSE2-NEXT: pand %xmm6, %xmm0
    53  ; SSE2-NEXT: movdqa %xmm0, %xmm6
    54  ; SSE2-NEXT: pcmpgtd %xmm5, %xmm6
    55  ; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
    59  ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
    [all …]
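SSE2 has no packed 64-bit min/max, so the lowering above builds the signed clamps from pcmpgtd compares, pshufd lane copies, and pand blends (xmm6 holds intermediate compare masks). For 32-to-16-bit elements the saturating truncation collapses to a single pack, as in this illustrative sketch:

    #include <emmintrin.h>  /* SSE2 */

    /* Signed-saturate eight 32-bit lanes (from a and b) down to 16 bits. */
    static inline __m128i trunc_ssat_epi32_to_epi16(__m128i a, __m128i b)
    {
        return _mm_packs_epi32(a, b); /* packssdw */
    }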
|
/external/boringssl/src/crypto/fipsmodule/aes/asm/ |
D | vpaes-x86_64.pl |
    88   ## Preserves %xmm6 - %xmm8 so you get some local vectors
    181  ## AES-encrypt %xmm0 and %xmm6 in parallel.
    184  ## %xmm0 and %xmm6 = input
    188  ## Output in %xmm0 and %xmm6
    202  ## as before. The second uses %xmm6-%xmm8,%xmm11-%xmm13. (Add 6 to %xmm2 and
    219  pandn %xmm6, %xmm7
    225  pand %xmm9, %xmm6
    227  pshufb %xmm6, %xmm8
    229  movdqa %xmm0, %xmm6
    231  pshufb %xmm7, %xmm6
    [all …]
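The pand/pandn/pshufb lines are the vpaes building block: split each byte into its low and high nibble and use the nibbles as pshufb indices into 16-entry tables, combining the lookups with xor in later steps. A hedged sketch of that building block with placeholder tables (not the real vpaes constants or register allocation):

    #include <tmmintrin.h>  /* SSSE3 */

    /* Look up each byte's low and high nibble in two 16-entry tables and
       xor the results -- the core trick behind the vpaes rounds. */
    static inline __m128i nibble_lookup(__m128i in, __m128i tab_lo,
                                        __m128i tab_hi)
    {
        const __m128i m4 = _mm_set1_epi8(0x0F);
        __m128i lo = _mm_and_si128(in, m4);                    /* low nibbles  */
        __m128i hi = _mm_and_si128(_mm_srli_epi16(in, 4), m4); /* high nibbles */
        return _mm_xor_si128(_mm_shuffle_epi8(tab_lo, lo),
                             _mm_shuffle_epi8(tab_hi, hi));
    }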
|
/external/boringssl/win-x86_64/crypto/cipher_extra/ |
D | aes128gcmsiv-x86_64.asm |
    185  vpclmulqdq xmm6,xmm0,XMMWORD[r11*1+rdi],0x10
    186  vpxor xmm5,xmm5,xmm6
    200  vpclmulqdq xmm6,xmm0,XMMWORD[r11*1+rdi],0x00
    201  vpxor xmm3,xmm3,xmm6
    202  vpclmulqdq xmm6,xmm0,XMMWORD[r11*1+rdi],0x11
    203  vpxor xmm4,xmm4,xmm6
    204  vpclmulqdq xmm6,xmm0,XMMWORD[r11*1+rdi],0x01
    205  vpxor xmm5,xmm5,xmm6
    206  vpclmulqdq xmm6,xmm0,XMMWORD[r11*1+rdi],0x10
    207  vpxor xmm5,xmm5,xmm6
    [all …]
|
/external/boringssl/linux-x86_64/crypto/fipsmodule/ |
D | ghash-ssse3-x86_64.S |
    51   movdqa %xmm2,%xmm6
    53   movdqa %xmm6,%xmm3
    70   movdqa %xmm5,%xmm6
    71   pslldq $8,%xmm6
    72   pxor %xmm6,%xmm3
    99   movdqa %xmm2,%xmm6
    101  movdqa %xmm6,%xmm3
    118  movdqa %xmm5,%xmm6
    119  pslldq $8,%xmm6
    120  pxor %xmm6,%xmm3
    [all …]
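The movdqa/pslldq/pxor triplets fold a partial result into the accumulator by shifting it left a whole number of bytes and xoring; an intrinsics rendering of that single step (illustrative helper, not the constant-time GHASH routine itself):

    #include <emmintrin.h>  /* SSE2 */

    /* pslldq $8 + pxor: move the partial value up by 8 bytes and fold it in. */
    static inline __m128i fold_shifted_by_8(__m128i acc, __m128i partial)
    {
        return _mm_xor_si128(acc, _mm_slli_si128(partial, 8));
    }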
|
D | aesni-gcm-x86_64.S |
    47   vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6
    80   vpxor %xmm5,%xmm6,%xmm6
    108  vpxor %xmm1,%xmm6,%xmm6
    111  vpxor %xmm2,%xmm6,%xmm6
    126  vpxor %xmm2,%xmm6,%xmm6
    129  vpxor %xmm3,%xmm6,%xmm6
    148  vpxor %xmm3,%xmm6,%xmm6
    151  vpxor %xmm5,%xmm6,%xmm6
    170  vpxor %xmm5,%xmm6,%xmm6
    173  vpxor %xmm1,%xmm6,%xmm6
    [all …]
|
/external/boringssl/mac-x86_64/crypto/fipsmodule/ |
D | ghash-ssse3-x86_64.S |
    51   movdqa %xmm2,%xmm6
    53   movdqa %xmm6,%xmm3
    70   movdqa %xmm5,%xmm6
    71   pslldq $8,%xmm6
    72   pxor %xmm6,%xmm3
    99   movdqa %xmm2,%xmm6
    101  movdqa %xmm6,%xmm3
    118  movdqa %xmm5,%xmm6
    119  pslldq $8,%xmm6
    120  pxor %xmm6,%xmm3
    [all …]
|
D | aesni-gcm-x86_64.S |
    47   vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6
    80   vpxor %xmm5,%xmm6,%xmm6
    108  vpxor %xmm1,%xmm6,%xmm6
    111  vpxor %xmm2,%xmm6,%xmm6
    126  vpxor %xmm2,%xmm6,%xmm6
    129  vpxor %xmm3,%xmm6,%xmm6
    148  vpxor %xmm3,%xmm6,%xmm6
    151  vpxor %xmm5,%xmm6,%xmm6
    170  vpxor %xmm5,%xmm6,%xmm6
    173  vpxor %xmm1,%xmm6,%xmm6
    [all …]
|
/external/boringssl/linux-x86/crypto/fipsmodule/ |
D | ghash-ssse3-x86.S |
    38  movdqa %xmm2,%xmm6
    40  movdqa %xmm6,%xmm3
    48  movdqa %xmm5,%xmm6
    49  pslldq $8,%xmm6
    50  pxor %xmm6,%xmm3
    69  movdqa %xmm2,%xmm6
    71  movdqa %xmm6,%xmm3
    79  movdqa %xmm5,%xmm6
    80  pslldq $8,%xmm6
    81  pxor %xmm6,%xmm3
    [all …]
|
/external/boringssl/mac-x86/crypto/fipsmodule/ |
D | ghash-ssse3-x86.S |
    37  movdqa %xmm2,%xmm6
    39  movdqa %xmm6,%xmm3
    47  movdqa %xmm5,%xmm6
    48  pslldq $8,%xmm6
    49  pxor %xmm6,%xmm3
    68  movdqa %xmm2,%xmm6
    70  movdqa %xmm6,%xmm3
    78  movdqa %xmm5,%xmm6
    79  pslldq $8,%xmm6
    80  pxor %xmm6,%xmm3
    [all …]
|
/external/boringssl/win-x86_64/crypto/fipsmodule/ |
D | ghash-ssse3-x86_64.asm |
    27   movdqa XMMWORD[rsp],xmm6
    56   movdqa xmm6,xmm2
    58   movdqa xmm3,xmm6
    75   movdqa xmm6,xmm5
    76   pslldq xmm6,8
    77   pxor xmm3,xmm6
    104  movdqa xmm6,xmm2
    106  movdqa xmm3,xmm6
    123  movdqa xmm6,xmm5
    124  pslldq xmm6,8
    [all …]
|
/external/boringssl/win-x86/crypto/fipsmodule/ |
D | ghash-ssse3-x86.asm |
    50  movdqa xmm6,xmm2
    52  movdqa xmm3,xmm6
    60  movdqa xmm6,xmm5
    61  pslldq xmm6,8
    62  pxor xmm3,xmm6
    81  movdqa xmm6,xmm2
    83  movdqa xmm3,xmm6
    91  movdqa xmm6,xmm5
    92  pslldq xmm6,8
    93  pxor xmm3,xmm6
    [all …]
|
/external/libaom/libaom/third_party/libyuv/source/ |
D | rotate_win.cc |
    58  movq xmm6, qword ptr [eax] in TransposeWx8_SSSE3()
    61  punpcklbw xmm6, xmm7 in TransposeWx8_SSSE3()
    63  movdqa xmm7, xmm6 in TransposeWx8_SSSE3()
    72  punpcklwd xmm4, xmm6 in TransposeWx8_SSSE3()
    74  movdqa xmm6, xmm4 in TransposeWx8_SSSE3()
    76  palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
    86  punpckldq xmm2, xmm6 in TransposeWx8_SSSE3()
    87  movdqa xmm6, xmm2 in TransposeWx8_SSSE3()
    88  palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
    91  movq qword ptr [edx + esi], xmm6 in TransposeWx8_SSSE3()
    [all …]
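The movq/punpcklbw/punpcklwd/punpckldq/palignr sequence is the usual SSE byte-matrix transpose: interleave rows at byte, word, and dword granularity, then rotate with palignr to peel off the upper half before storing. A small sketch of one such step (illustrative helper, not the libyuv function):

    #include <tmmintrin.h>  /* SSSE3, for _mm_alignr_epi8 */

    /* Interleave the low bytes of two rows, then split the result into its
       low and high halves the way the asm does with palignr $8. */
    static inline void transpose_step(__m128i row_a, __m128i row_b,
                                      __m128i *low_half, __m128i *high_half)
    {
        __m128i t = _mm_unpacklo_epi8(row_a, row_b); /* punpcklbw */
        *low_half  = t;
        *high_half = _mm_alignr_epi8(t, t, 8);       /* palignr $8: swap halves */
    }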
|
/external/libyuv/files/source/ |
D | rotate_win.cc |
    59  movq xmm6, qword ptr [eax] in TransposeWx8_SSSE3()
    62  punpcklbw xmm6, xmm7 in TransposeWx8_SSSE3()
    64  movdqa xmm7, xmm6 in TransposeWx8_SSSE3()
    73  punpcklwd xmm4, xmm6 in TransposeWx8_SSSE3()
    75  movdqa xmm6, xmm4 in TransposeWx8_SSSE3()
    77  palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
    87  punpckldq xmm2, xmm6 in TransposeWx8_SSSE3()
    88  movdqa xmm6, xmm2 in TransposeWx8_SSSE3()
    89  palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
    92  movq qword ptr [edx + esi], xmm6 in TransposeWx8_SSSE3()
    [all …]
|
/external/libvpx/libvpx/third_party/libyuv/source/ |
D | rotate_win.cc |
    59  movq xmm6, qword ptr [eax] in TransposeWx8_SSSE3()
    62  punpcklbw xmm6, xmm7 in TransposeWx8_SSSE3()
    64  movdqa xmm7, xmm6 in TransposeWx8_SSSE3()
    73  punpcklwd xmm4, xmm6 in TransposeWx8_SSSE3()
    75  movdqa xmm6, xmm4 in TransposeWx8_SSSE3()
    77  palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
    87  punpckldq xmm2, xmm6 in TransposeWx8_SSSE3()
    88  movdqa xmm6, xmm2 in TransposeWx8_SSSE3()
    89  palignr xmm6, xmm6, 8 in TransposeWx8_SSSE3()
    92  movq qword ptr [edx + esi], xmm6 in TransposeWx8_SSSE3()
    [all …]
|