/arch/riscv/crypto/
aes-riscv64-zvkned.S
     54  #define INP a1                      (macro definition)
     60  vle32.v v16, (INP)
     93  vle32.v v16, (INP)
     96  add INP, INP, t1
    128  vle32.v v17, (INP)                  // Load plaintext block
    132  addi INP, INP, 16
    146  vle32.v v20, (INP)                  // Load ciphertext blocks
    155  add INP, INP, t1
    200  vle32.v v17, (INP)                  // Load plaintext block
    203  addi INP, INP, 16
    [all …]
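As a point of reference, the access pattern these hits trace (load a block at INP, process it, advance the pointer by the bytes consumed: 16 for one block, a computed stride t1 for a multi-block step) looks roughly like this in C. aes_encrypt_block is a hypothetical stand-in, not the kernel's API:

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical per-block helper; stands in for the AES round
     * sequence run between the loads shown above. */
    extern void aes_encrypt_block(const uint8_t in[16], uint8_t out[16]);

    static void encrypt_blocks(const uint8_t *inp, uint8_t *out,
                               size_t nblocks)
    {
        while (nblocks--) {
            aes_encrypt_block(inp, out); /* vle32.v v17, (INP) + rounds */
            inp += 16;                   /* addi INP, INP, 16 */
            out += 16;
        }
    }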
aes-riscv64-zvkned-zvkb.S
     53  #define INP a1                      (macro definition)
    113  vle8.v v20, (INP)
    118  add INP, INP, t0
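Here the advance amount t0 is computed per iteration, in the vector-length-agnostic RVV style, rather than being a fixed 16. A loose C rendering, with max_chunk() and process_chunk() as invented stand-ins for vsetvli and the vector work:

    #include <stddef.h>
    #include <stdint.h>

    extern size_t max_chunk(size_t remaining);            /* hypothetical */
    extern void process_chunk(const uint8_t *p, size_t n); /* hypothetical */

    static void walk_input(const uint8_t *inp, size_t len)
    {
        while (len > 0) {
            size_t t0 = max_chunk(len); /* bytes this pass consumes */
            process_chunk(inp, t0);     /* vle8.v v20, (INP) + rounds */
            inp += t0;                  /* add INP, INP, t0 */
            len -= t0;
        }
    }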
aes-riscv64-zvkned-zvbb-zvkg.S
     54  #define INP a1                      (macro definition)
    178  vle32.v TMP0, (INP)
    186  add INP, INP, t0
    245  vle32.v TMP0, (INP)
    253  addi t0, INP, 16
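Note line 253: it adds 16 to INP into t0 instead of advancing INP, i.e. it forms the address of the following block while leaving the input pointer in place, likely for tail handling. A minimal sketch, with xts_process_block purely hypothetical:

    #include <stdint.h>

    extern void xts_process_block(const uint8_t *blk); /* hypothetical */

    static void process_last_two(const uint8_t *inp)
    {
        const uint8_t *t0 = inp + 16; /* addi t0, INP, 16: address of
                                         the next block; INP unchanged */
        xts_process_block(inp);       /* vle32.v TMP0, (INP) */
        xts_process_block(t0);
    }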
chacha-riscv64-zvkb.S
     50  #define INP a1                      (macro definition)
    204  vlsseg8e32.v v16, (INP), STRIDE
    219  addi TMP, INP, 32
    277  add INP, INP, TMP
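vlsseg8e32.v is a strided segment load: for each element i it reads 8 consecutive 32-bit words starting at INP + i*STRIDE, effectively transposing blocks into vector lanes. A scalar C equivalent of that one instruction (array sizes and names invented here):

    #include <stddef.h>
    #include <stdint.h>

    static inline uint32_t load_le32(const uint8_t *p)
    {
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }

    /* Word j of block i lands in lane i of row j; vl <= 16 assumed. */
    static void strided_seg_load(uint32_t rows[8][16],
                                 const uint8_t *inp,
                                 size_t stride, size_t vl)
    {
        for (size_t i = 0; i < vl; i++)
            for (size_t j = 0; j < 8; j++)
                rows[j][i] = load_le32(inp + i * stride + 4 * j);
    }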
/arch/s390/crypto/
chacha-s390.S
     44  #define INP %r3                     (macro definition)
    293  VLM XT0,XT3,0,INP,0
    302  la INP,0x40(INP)
    319  VLM XT0,XT3,0,INP,0
    328  la INP,0x40(INP)
    346  VLM XT0,XT3,0,INP,0
    355  la INP,0x40(INP)
    373  VLM XT0,XT3,0,INP,0
    398  llgc %r5,0(%r1,INP)
    410  #undef INP
    [all …]
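VLM XT0,XT3,0,INP,0 fills four 16-byte vector registers from consecutive memory, i.e. one 64-byte ChaCha block, and la INP,0x40(INP) steps the pointer by those same 64 bytes; llgc is a single-byte load used on the partial tail. Roughly, in C, assuming hypothetical helpers:

    #include <stddef.h>
    #include <stdint.h>

    extern void xor_keystream64(uint8_t *dst, const uint8_t *src); /* hypothetical */
    extern uint8_t keystream_byte(size_t i);                       /* hypothetical */

    static void chacha_xor(uint8_t *out, const uint8_t *inp, size_t len)
    {
        while (len >= 64) {
            xor_keystream64(out, inp); /* VLM XT0,XT3,0,INP,0: four
                                          16-byte loads = one block */
            inp += 0x40;               /* la INP,0x40(INP) */
            out += 0x40;
            len -= 0x40;
        }
        for (size_t i = 0; i < len; i++)         /* byte-wise tail */
            out[i] = inp[i] ^ keystream_byte(i); /* llgc %r5,0(%r1,INP) */
    }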
/arch/x86/crypto/
sha256-avx2-asm.S
     94  INP = %rsi                          # 2nd arg  (define)
     99  y3 = %esi                           # clobbers INP
    542  lea -64(INP, NUM_BLKS), NUM_BLKS    # pointer to last block
    545  cmp NUM_BLKS, INP
    566  VMOVDQ 0*32(INP),XTMP0
    567  VMOVDQ 1*32(INP),XTMP1
    568  VMOVDQ 2*32(INP),XTMP2
    569  VMOVDQ 3*32(INP),XTMP3
    584  add $64, INP
    585  mov INP, _INP(%rsp)
    [all …]
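Because y3 is %esi, the low half of the register holding INP, the pointer has to be spilled to the stack slot _INP(%rsp) around the rounds; the loop bound at line 542 is a pointer to the last 64-byte block. A loose C rendering (sha256_rounds is a hypothetical stand-in; the real code interleaves two blocks with 32-byte AVX2 loads):

    #include <stddef.h>
    #include <stdint.h>

    extern void sha256_rounds(uint32_t st[8], const uint8_t *blk); /* hypothetical */

    static void sha256_loop(uint32_t st[8], const uint8_t *inp,
                            size_t nbytes)
    {
        const uint8_t *last = inp + nbytes - 64; /* lea -64(INP,NUM_BLKS) */
        while (inp <= last) {                    /* cmp NUM_BLKS, INP */
            const uint8_t *saved = inp + 64;     /* add $64, INP;
                                                    mov INP, _INP(%rsp):
                                                    spilled because y3
                                                    reuses the register */
            sha256_rounds(st, inp);
            inp = saved;                         /* reload from _INP */
        }
    }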
aesni-intel_asm.S
    152  #define INP %rdx                    (macro definition, x86-64)
    165  #define INP %edx                    (macro definition, i386)
   1947  movl (FRAME_OFFSET+20)(%esp), INP   # src
   1950  movups (INP), STATE                 # input
   2136  movl (FRAME_OFFSET+20)(%esp), INP   # src
   2140  movups (INP), STATE                 # input
   2328  movl (FRAME_OFFSET+24)(%esp), INP   # src
   2340  movups (INP), STATE1
   2341  movups 0x10(INP), STATE2
   2342  movups 0x20(INP), STATE3
    [all …]
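The two defines cover the two ABIs: on x86-64 the source pointer arrives in %rdx, while the i386 paths reload it from the caller's stack frame (the movl ...(%esp) lines) before use. The movups run at 2340-2342 pulls several unaligned 16-byte blocks at consecutive offsets so the AES-NI rounds can proceed on them in parallel; a sketch in C, with memcpy standing in for movups:

    #include <stdint.h>
    #include <string.h>

    /* Load n states from consecutive 16-byte offsets, as at 2340-2342. */
    static void load_states(uint8_t st[][16], const uint8_t *inp, int n)
    {
        for (int i = 0; i < n; i++)
            memcpy(st[i], inp + 0x10 * i, 16); /* movups i*0x10(INP), STATEi */
    }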
sha256-avx-asm.S
     99  INP = %rsi                          # 2nd arg  (define)
    102  SRND = %rsi                         # clobbers INP
    364  add INP, NUM_BLKS                   # pointer to end of data
    384  COPY_XMM_AND_BSWAP X0, 0*16(INP), BYTE_FLIP_MASK
    385  COPY_XMM_AND_BSWAP X1, 1*16(INP), BYTE_FLIP_MASK
    386  COPY_XMM_AND_BSWAP X2, 2*16(INP), BYTE_FLIP_MASK
    387  COPY_XMM_AND_BSWAP X3, 3*16(INP), BYTE_FLIP_MASK
    389  mov INP, _INP(%rsp)
    447  mov _INP(%rsp), INP
    448  add $64, INP
    [all …]
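COPY_XMM_AND_BSWAP pairs each 16-byte load with a byte shuffle against BYTE_FLIP_MASK, since SHA-256 message words are big-endian. The scalar equivalent of one such load-and-swap, as a self-contained sketch:

    #include <stdint.h>

    static inline uint32_t load_be32(const uint8_t *p)
    {
        return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
    }

    /* One 64-byte block -> sixteen big-endian message words,
     * the effect of the four COPY_XMM_AND_BSWAP loads above. */
    static void load_block(uint32_t w[16], const uint8_t *inp)
    {
        for (int i = 0; i < 16; i++)
            w[i] = load_be32(inp + 4 * i);
    }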
sha256-ssse3-asm.S
     92  INP = %rsi                          # 2nd arg  (define)
     95  SRND = %rsi                         # clobbers INP
    373  add INP, NUM_BLKS
    394  COPY_XMM_AND_BSWAP X0, 0*16(INP), BYTE_FLIP_MASK
    395  COPY_XMM_AND_BSWAP X1, 1*16(INP), BYTE_FLIP_MASK
    396  COPY_XMM_AND_BSWAP X2, 2*16(INP), BYTE_FLIP_MASK
    397  COPY_XMM_AND_BSWAP X3, 3*16(INP), BYTE_FLIP_MASK
    399  mov INP, _INP(%rsp)
    460  mov _INP(%rsp), INP
    461  add $64, INP
    [all …]
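Same shape as the AVX variant above: %rsi serves as INP while loading and as the round counter SRND during the rounds, so INP is parked in _INP(%rsp) and reloaded before the 64-byte advance. In C terms (helper names hypothetical):

    #include <stdint.h>

    extern void load_block_be(uint32_t w[16], const uint8_t *p);        /* hypothetical */
    extern void sha256_64_rounds(uint32_t st[8], const uint32_t w[16]); /* hypothetical */

    static const uint8_t *one_block(uint32_t st[8], const uint8_t *inp)
    {
        uint32_t w[16];
        const uint8_t *_inp = inp; /* mov INP, _INP(%rsp): %rsi is
                                      about to become SRND */
        load_block_be(w, inp);
        sha256_64_rounds(st, w);   /* SRND owns %rsi here */
        return _inp + 64;          /* mov _INP(%rsp), INP; add $64, INP */
    }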
sha512-avx2-asm.S
     76  INP = %rsi                          (define)
    585  add INP, NUM_BLKS                   # pointer to end of data
    607  COPY_YMM_AND_BSWAP Y_0, (INP), BYTE_FLIP_MASK
    608  COPY_YMM_AND_BSWAP Y_1, 1*32(INP), BYTE_FLIP_MASK
    609  COPY_YMM_AND_BSWAP Y_2, 2*32(INP), BYTE_FLIP_MASK
    610  COPY_YMM_AND_BSWAP Y_3, 3*32(INP), BYTE_FLIP_MASK
    612  mov INP, frame_INP(%rsp)
    665  mov frame_INP(%rsp), INP
    666  add $128, INP
    667  cmp frame_INPEND(%rsp), INP
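SHA-512 works on 128-byte blocks, hence the four 32-byte byte-swapped loads at 607-610 and the add $128 advance; the loop terminates by comparing INP against a precomputed end-of-data pointer kept in frame_INPEND(%rsp). A minimal sketch, with sha512_block as a hypothetical helper:

    #include <stddef.h>
    #include <stdint.h>

    extern void sha512_block(uint64_t st[8], const uint8_t *blk); /* hypothetical */

    static void sha512_blocks(uint64_t st[8], const uint8_t *inp,
                              size_t nbytes)
    {
        const uint8_t *end = inp + nbytes; /* add INP, NUM_BLKS ->
                                              kept in frame_INPEND */
        while (inp < end) {                /* cmp frame_INPEND(%rsp), INP */
            sha512_block(st, inp);         /* four 32-byte loads cover
                                              the 128-byte block */
            inp += 128;                    /* add $128, INP */
        }
    }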