/arch/x86/crypto/

cast6-avx-x86_64-asm_64.S
     53  #define RC1 %xmm2   (macro)
    271  inpack_blocks(RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM);
    295  outunpack_blocks(RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM);
    317  inpack_blocks(RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM);
    340  outunpack_blocks(RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM);
    355  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    359  store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    373  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    377  store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    394  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    [all …]

twofish-avx-x86_64-asm_64.S
     56  #define RC1 %xmm2   (macro)
    263  inpack_blocks(RA1, RB1, RC1, RD1, RK1, RX0, RY0, RK2);
    284  outunpack_blocks(RC1, RD1, RA1, RB1, RK1, RX0, RY0, RK2);
    304  inpack_blocks(RC1, RD1, RA1, RB1, RK1, RX0, RY0, RK2);
    305  preload_rgi(RC1);
    324  outunpack_blocks(RA1, RB1, RC1, RD1, RK1, RX0, RY0, RK2);
    339  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    343  store_8way(%r11, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
    357  load_8way(%rdx, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
    361  store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    [all …]

twofish-avx2-asm_64.S
     62  #define RC1 %ymm14   (macro)
    470  load_16way(%rdx, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1);
    474  store_16way(%rsi, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1);
    492  load_16way(%rdx, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1);
    496  store_16way(%rsi, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1);
    514  load_16way(%rdx, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1);
    518  store_cbc_16way(%rdx, %rsi, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1,
    538  load_ctr_16way(%rcx, .Lbswap128_mask, RA0, RB0, RC0, RD0, RA1, RB1, RC1,
    544  store_ctr_16way(%rdx, %rsi, RA0, RB0, RC0, RD0, RA1, RB1, RC1, RD1);
    565  load_xts_16way(%rcx, %rdx, %rsi, RA0, RB0, RC0, RD0, RA1, RB1, RC1,
    [all …]

serpent-avx2-asm_64.S
     43  #define RC1 %ymm6   (macro)
    573  read_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    610  write_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    627  read_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    664  write_blocks(RC1, RD1, RB1, RE1, RK0, RK1, RK2);
    679  load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    683  store_16way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    699  load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    703  store_16way(%rsi, RC1, RD1, RB1, RE1, RC2, RD2, RB2, RE2);
    719  load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    [all …]

serpent-avx-x86_64-asm_64.S
     48  #define RC1 %xmm2   (macro)
    581  read_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    618  write_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    635  read_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    672  write_blocks(RC1, RD1, RB1, RE1, RK0, RK1, RK2);
    685  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    689  store_8way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    701  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    705  store_8way(%rsi, RC1, RD1, RB1, RE1, RC2, RD2, RB2, RE2);
    717  load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
    [all …]

serpent-sse2-x86_64-asm_64.S
     39  #define RC1 %xmm2   (macro)
    648  read_blocks(%rdx, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    690  write_blocks(%rsi, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    696  xor_blocks(%rsi, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    712  read_blocks(%rdx, RA1, RB1, RC1, RD1, RK0, RK1, RK2);
    750  write_blocks(%rsi, RC1, RD1, RB1, RE1, RK0, RK1, RK2);
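
All of these hits follow the same pattern: each .S file defines C-preprocessor aliases such as RC1 for a SIMD register (%xmm2 in the SSE2/AVX variants, %ymm6 or %ymm14 in the AVX2 variants) and then hands those aliases to parameterized assembler macros like inpack_blocks, read_blocks, or load_8way that process several cipher blocks in parallel. The following is a minimal illustrative sketch of that register-aliasing pattern only, not code from these files; the xor_4way macro and the demo_xor_blocks symbol are hypothetical names invented for the example.

    /* demo.S - illustrative sketch, assuming the System V AMD64 ABI
     * (%rdi = pointer to a 16-byte round key, %rsi = four 16-byte blocks).
     * The capital .S extension matters: such files are run through the C
     * preprocessor, which is what makes the register-alias #defines work. */

    /* SIMD register aliases, mirroring "#define RC1 %xmm2" in the files above */
    #define RA1	%xmm0
    #define RB1	%xmm1
    #define RC1	%xmm2
    #define RD1	%xmm3
    #define RKEY	%xmm4

    /* hypothetical helper macro: XOR one round key into four block registers */
    #define xor_4way(a, b, c, d, key) \
    	vpxor key, a, a; \
    	vpxor key, b, b; \
    	vpxor key, c, c; \
    	vpxor key, d, d;

    .text
    .globl demo_xor_blocks
    demo_xor_blocks:
    	vmovdqu (%rdi), RKEY;		/* load the round key */
    	vmovdqu 0*16(%rsi), RA1;	/* load four blocks in parallel */
    	vmovdqu 1*16(%rsi), RB1;
    	vmovdqu 2*16(%rsi), RC1;
    	vmovdqu 3*16(%rsi), RD1;
    	xor_4way(RA1, RB1, RC1, RD1, RKEY)
    	vmovdqu RA1, 0*16(%rsi);	/* store the results back */
    	vmovdqu RB1, 1*16(%rsi);
    	vmovdqu RC1, 2*16(%rsi);
    	vmovdqu RD1, 3*16(%rsi);
    	ret

Keeping the register assignments behind #defines lets the same round and pack/unpack macros be instantiated over different register sets (e.g. RA1..RD1 and RA2..RD2 in the 8-way variants above) without duplicating the instruction sequences.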