/arch/x86/crypto/

cast6-avx-x86_64-asm_64.S
      56  #define RA2 %xmm4                                        (macro definition)
     272  inpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
     296  outunpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
     318  inpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
     341  outunpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
     355  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     359  store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     373  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     377  store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     394  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
          [all …]
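
The hits above follow a common pattern in these files: xmm registers get C-preprocessor aliases (RA1..RD1 for the first four 128-bit blocks, RA2..RD2 for the next four), and 8-way load/store helpers move the blocks between memory and those aliases. A minimal sketch of that idiom, not copied from cast6-avx-x86_64-asm_64.S; only RA2 = %xmm4 comes from the listing, the other register choices and the macro bodies are illustrative:

    /* regalias-sketch.S: illustrative only; assemble with "gcc -c regalias-sketch.S" */

    /* Register aliases in the style of these .S files: the first four 128-bit
     * blocks live in RA1..RD1, the next four in RA2..RD2. */
    #define RA1 %xmm0
    #define RB1 %xmm1
    #define RC1 %xmm2
    #define RD1 %xmm3
    #define RA2 %xmm4
    #define RB2 %xmm5
    #define RC2 %xmm6
    #define RD2 %xmm7

    /* Load eight consecutive 16-byte blocks from *src into the eight aliases. */
    #define load_8way(src, x0, x1, x2, x3, x4, x5, x6, x7) \
        vmovdqu (0*16)(src), x0; \
        vmovdqu (1*16)(src), x1; \
        vmovdqu (2*16)(src), x2; \
        vmovdqu (3*16)(src), x3; \
        vmovdqu (4*16)(src), x4; \
        vmovdqu (5*16)(src), x5; \
        vmovdqu (6*16)(src), x6; \
        vmovdqu (7*16)(src), x7;

    /* Store them back out to *dst in the same order. */
    #define store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7) \
        vmovdqu x0, (0*16)(dst); \
        vmovdqu x1, (1*16)(dst); \
        vmovdqu x2, (2*16)(dst); \
        vmovdqu x3, (3*16)(dst); \
        vmovdqu x4, (4*16)(dst); \
        vmovdqu x5, (5*16)(dst); \
        vmovdqu x6, (6*16)(dst); \
        vmovdqu x7, (7*16)(dst);

    .text
    .globl copy_8way_sketch
    /* copy_8way_sketch(dst in %rdi, src in %rsi): move 8 blocks through the aliases. */
    copy_8way_sketch:
        load_8way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2)
        store_8way(%rdi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2)
        ret
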
twofish-avx-x86_64-asm_64.S
      59  #define RA2 %xmm4                                        (macro definition)
     266  inpack_blocks(RA2, RB2, RC2, RD2, RK1, RX0, RY0, RK2);
     285  outunpack_blocks(RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2);
     307  inpack_blocks(RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2);
     308  rotate_1l(RA2);
     325  outunpack_blocks(RA2, RB2, RC2, RD2, RK1, RX0, RY0, RK2);
     339  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     343  store_8way(%r11, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
     357  load_8way(%rdx, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
     361  store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
          [all …]
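
The rotate_1l(RA2) hit at line 308 rotates every 32-bit word of the register left by one bit. Plain AVX (before AVX-512) has no packed-rotate instruction, so such a rotate is normally built from two shifts and an OR. A minimal sketch of that construction; the RTMP scratch register and the wrapper function are assumptions, only RA2 = %xmm4 comes from the listing:

    /* rot1l-sketch.S: illustrative only; assemble with "gcc -c rot1l-sketch.S" */

    #define RA2  %xmm4      /* from the listing above */
    #define RTMP %xmm15     /* scratch register, assumed free here */

    /* Rotate each 32-bit lane of x left by one bit:
     *   RTMP = x << 1;  x = x >> 31;  x |= RTMP; */
    #define rotate_1l(x) \
        vpslld $1, x, RTMP; \
        vpsrld $31, x, x; \
        vpor RTMP, x, x;

    .text
    .globl rot1l_sketch
    /* rot1l_sketch(buf in %rdi): rotate four u32 words in place. */
    rot1l_sketch:
        vmovdqu (%rdi), RA2
        rotate_1l(RA2)
        vmovdqu RA2, (%rdi)
        ret
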
serpent-avx-x86_64-asm_64.S
      54  #define RA2 %xmm6                                        (macro definition)
     582  read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     619  write_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     636  read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     685  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     689  store_8way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     701  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     717  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     734  load_ctr_8way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2,
     739  store_ctr_8way(%rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
          [all …]
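
The load_ctr_8way/store_ctr_8way hits take a .Lbswap128_mask constant, which points at the usual CTR-mode trick: the 128-bit counter is big-endian on the wire, so it is byte-reversed with a pshufb shuffle before and after counter arithmetic. A sketch of just that byte-reversal step (the counter-increment logic is omitted); the wrapper function and register choices are illustrative, only the .Lbswap128_mask name and RA-style aliases come from the listing:

    /* ctr-bswap-sketch.S: illustrative only; assemble with "gcc -c ctr-bswap-sketch.S" */

    #define RA1  %xmm0      /* illustrative register choices */
    #define RTMP %xmm14     /* holds the shuffle mask */

    .section .rodata.cst16, "aM", @progbits, 16
    .align 16
    /* pshufb control that reverses all 16 bytes of a register, converting a
     * big-endian 128-bit CTR counter to little-endian (and back again). */
    .Lbswap128_mask:
        .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0

    .text
    .globl ctr_bswap_sketch
    /* ctr_bswap_sketch(ctr in %rdi): byte-reverse the 128-bit counter in place. */
    ctr_bswap_sketch:
        vmovdqa .Lbswap128_mask(%rip), RTMP
        vmovdqu (%rdi), RA1
        vpshufb RTMP, RA1, RA1
        vmovdqu RA1, (%rdi)
        ret
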
serpent-avx2-asm_64.S
      40  #define RA2 %ymm3                                        (macro definition)
     574  read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     611  write_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     628  read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     679  load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     683  store_16way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     699  load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     719  load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
     741  load_ctr_16way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2,
     747  store_ctr_16way(%rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
          [all …]
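
In the AVX2 variant RA2 is a 256-bit ymm register, so each alias carries two 128-bit blocks and the 16-way load/store helpers walk the buffer in 32-byte strides. A short sketch of that stride pattern; apart from RA2 = %ymm3, the register names, offsets, and wrapper function are illustrative:

    /* avx2-stride-sketch.S: illustrative only; assemble with "gcc -c avx2-stride-sketch.S" */

    #define RA2 %ymm3       /* from the listing above */
    #define RB2 %ymm4       /* illustrative */

    .text
    .globl copy_4blocks_sketch
    /* Each 256-bit ymm alias holds two 128-bit blocks, so 16 blocks fit in the
     * same eight aliases; this copies blocks 0..3 from %rsi to %rdi. */
    copy_4blocks_sketch:
        vmovdqu (0*32)(%rsi), RA2       /* blocks 0 and 1 */
        vmovdqu (1*32)(%rsi), RB2       /* blocks 2 and 3 */
        /* ...six more such loads would bring in all sixteen blocks... */
        vmovdqu RA2, (0*32)(%rdi)
        vmovdqu RB2, (1*32)(%rdi)
        vzeroupper                      /* leave a clean AVX state before returning */
        ret
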
serpent-sse2-x86_64-asm_64.S
      43  #define RA2 %xmm5                                        (macro definition)
     649  read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     691  write_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     697  xor_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
     713  read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
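
Unlike the AVX variants, the SSE2 version passes a memory pointer (%rax here) straight into read_blocks/write_blocks/xor_blocks instead of using separate load_8way helpers. The xor_blocks hit suggests XORing in-memory blocks directly into the block registers via pxor's memory form. A sketch of that shape; the macro body, its arity, the alignment assumption, and the wrapper function are all illustrative, only RA2 = %xmm5 comes from the listing:

    /* xor-blocks-sketch.S: illustrative only; assemble with "gcc -c xor-blocks-sketch.S" */

    #define RA2 %xmm5       /* from the listing above; the rest are illustrative */
    #define RB2 %xmm6
    #define RC2 %xmm7
    #define RD2 %xmm8

    /* XOR four 16-byte blocks at *io into the four block registers, SSE2 style.
     * pxor's memory form requires *io to be 16-byte aligned. */
    #define xor_blocks(io, x0, x1, x2, x3) \
        pxor (0*16)(io), x0; \
        pxor (1*16)(io), x1; \
        pxor (2*16)(io), x2; \
        pxor (3*16)(io), x3;

    .text
    .globl xor_blocks_sketch
    /* xor_blocks_sketch(src in %rdi, 16-byte aligned; inout buffer in %rsi). */
    xor_blocks_sketch:
        movdqu (0*16)(%rsi), RA2
        movdqu (1*16)(%rsi), RB2
        movdqu (2*16)(%rsi), RC2
        movdqu (3*16)(%rsi), RD2
        xor_blocks(%rdi, RA2, RB2, RC2, RD2)
        movdqu RA2, (0*16)(%rsi)
        movdqu RB2, (1*16)(%rsi)
        movdqu RC2, (2*16)(%rsi)
        movdqu RD2, (3*16)(%rsi)
        ret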