
Searched refs:shuffle (Results 1 – 7 of 7) sorted by relevance

/arch/x86/crypto/
cast6-avx-x86_64-asm_64.S
177 #define shuffle(mask) \   [macro]
275 preload_rkr(1, shuffle, .Lrkr_enc_Q_Q_QBAR_QBAR);
280 preload_rkr(2, shuffle, .Lrkr_enc_QBAR_QBAR_QBAR_QBAR);
318 preload_rkr(2, shuffle, .Lrkr_dec_Q_Q_Q_Q);
323 preload_rkr(1, shuffle, .Lrkr_dec_Q_Q_QBAR_QBAR);
328 preload_rkr(0, shuffle, .Lrkr_dec_QBAR_QBAR_QBAR_QBAR);
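For context, shuffle(mask) and the preload_rkr() calls above apply a byte permutation (a pshufb/vpshufb-style shuffle) to CAST6's preloaded key-rotation bytes, with the .Lrkr_* constants supplying the permutation. As a rough illustration of what such a byte shuffle does, here is a minimal C intrinsics sketch; the data and mask are hypothetical placeholders, not the kernel's .Lrkr_* values (build with -mssse3 or later).

#include <stdio.h>
#include <stdint.h>
#include <immintrin.h>

int main(void)
{
	/* placeholder key-rotation bytes 0..15 (not real CAST6 rkr data) */
	__m128i rkr = _mm_set_epi8(15, 14, 13, 12, 11, 10, 9, 8,
				   7, 6, 5, 4, 3, 2, 1, 0);

	/* hypothetical mask: reverse the bytes within each 32-bit word;
	 * each mask byte names the source byte index for that output lane */
	__m128i mask = _mm_set_epi8(12, 13, 14, 15, 8, 9, 10, 11,
				    4, 5, 6, 7, 0, 1, 2, 3);

	__m128i out = _mm_shuffle_epi8(rkr, mask);	/* the pshufb/vpshufb step */

	uint8_t buf[16];
	_mm_storeu_si128((__m128i *)buf, out);
	for (int i = 0; i < 16; i++)
		printf("%2u ", buf[i]);		/* 3 2 1 0 7 6 5 4 ... */
	printf("\n");
	return 0;
}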

sha256-ssse3-asm.S
86 SHUF_00BA = %xmm10 # shuffle xBxA -> 00BA
87 SHUF_DC00 = %xmm11 # shuffle xDxC -> DC00
505 # shuffle xBxA -> 00BA
511 # shuffle xDxC -> DC00

sha256-avx-asm.S
93 SHUF_00BA = %xmm10 # shuffle xBxA -> 00BA
94 SHUF_DC00 = %xmm12 # shuffle xDxC -> DC00
491 # shuffle xBxA -> 00BA
497 # shuffle xDxC -> DC00

sha256-avx2-asm.S
86 SHUF_00BA = %ymm10 # shuffle xBxA -> 00BA
87 SHUF_DC00 = %ymm12 # shuffle xDxC -> DC00
758 # shuffle xBxA -> 00BA
764 # shuffle xDxC -> DC00
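All three SHA-256 files above use the same pair of masks: SHUF_00BA packs the meaningful 32-bit words of an xBxA vector into the low half (00BA), and SHUF_DC00 packs an xDxC vector into the high half (DC00), so the two results can be combined into one contiguous DCBA vector. Below is a minimal C intrinsics sketch of that idea, with placeholder word values and hand-written masks rather than the kernel's constant-pool data; a mask byte with its top bit set makes pshufb write zero to that lane, which is how the unused halves are cleared (build with -mssse3 or later).

#include <stdio.h>
#include <stdint.h>
#include <immintrin.h>

int main(void)
{
	/* placeholder words: A=1, B=2, C=3, D=4; 'x' = don't-care 0xEE */
	__m128i xBxA = _mm_set_epi32(0xEE, 2, 0xEE, 1);	/* words (hi..lo): x B x A */
	__m128i xDxC = _mm_set_epi32(0xEE, 4, 0xEE, 3);	/* words (hi..lo): x D x C */

	/* keep bytes 0-3 and 8-11 packed into the low half, zero the rest -> 0 0 B A */
	const __m128i SHUF_00BA = _mm_set_epi8(
		0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
		11, 10, 9, 8, 3, 2, 1, 0);
	/* same selection packed into the high half, zero the rest -> D C 0 0 */
	const __m128i SHUF_DC00 = _mm_set_epi8(
		11, 10, 9, 8, 3, 2, 1, 0,
		0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80);

	__m128i lo = _mm_shuffle_epi8(xBxA, SHUF_00BA);	/* 0 0 B A */
	__m128i hi = _mm_shuffle_epi8(xDxC, SHUF_DC00);	/* D C 0 0 */
	__m128i dcba = _mm_or_si128(lo, hi);		/* D C B A */

	uint32_t w[4];
	_mm_storeu_si128((__m128i *)w, dcba);
	printf("%u %u %u %u\n", w[0], w[1], w[2], w[3]);	/* 1 2 3 4 (A B C D, low to high) */
	return 0;
}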

aesni-intel_avx-x86_64.S
449 sub %r13, %r12 # adjust the shuffle mask pointer to be
466 # adjust the shuffle mask pointer to be able to shift 16-r13 bytes
469 # get the appropriate shuffle mask
495 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9 # shuffle xmm9 back to output as ciphertext
774 # adjust the shuffle mask pointer to be able to shift r13 bytes
777 vmovdqu (%r12), %xmm2 # get the appropriate shuffle mask
854 # shuffle xmm9 back to output as ciphertext

aesni-intel_asm.S
373 # adjust the shuffle mask pointer to be able to shift 16-r13 bytes
376 # get the appropriate shuffle mask
409 # shuffle xmm0 back to output as ciphertext
665 # adjust the shuffle mask pointer to be able to shift r13 bytes
668 movdqu (%r12), %xmm2 # get the appropriate shuffle mask
745 # shuffle xmm9 back to output as ciphertext
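In both AES-NI GCM files, the "adjust the shuffle mask pointer" comments refer to partial final blocks: rather than branching on the length, the code offsets a pointer into a mask table and loads a 16-byte pshufb mask that shifts the register by the required number of bytes and zero-fills the remainder (the separate vpshufb with SHUF_MASK then restores the block's byte order before it is written out as ciphertext). Here is a hedged C sketch of the pointer-offset trick; the table and helper names are hypothetical stand-ins, not the kernel's actual constants (build with -mssse3 or later).

#include <stdio.h>
#include <stdint.h>
#include <immintrin.h>

/* [ 0, 1, ..., 15, 0xFF x 16 ]: hypothetical stand-in for the kernel's table */
static const uint8_t shift_table[32] = {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
	0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
	0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
};

/* shift a 16-byte block down (towards lane 0) by n bytes, zero-filling the top */
static __m128i shift_down_bytes(__m128i v, unsigned int n)
{
	/* the "adjusted pointer": an unaligned load at offset n picks the mask */
	__m128i mask = _mm_loadu_si128((const __m128i *)(shift_table + n));
	return _mm_shuffle_epi8(v, mask);	/* pshufb: 0xFF lanes become 0 */
}

int main(void)
{
	uint8_t in[16] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
			   'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P' };
	__m128i v = _mm_loadu_si128((const __m128i *)in);

	uint8_t out[16];
	_mm_storeu_si128((__m128i *)out, shift_down_bytes(v, 3));
	/* prints DEFGHIJKLMNOP followed by zeroed lanes shown as '.' */
	for (int i = 0; i < 16; i++)
		printf("%c", out[i] ? out[i] : '.');
	printf("\n");
	return 0;
}
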
/arch/arm/kernel/
entry-common.S
397 stmialo sp, {r5, r6} @ shuffle args