/arch/x86/crypto/
D | sm4_aesni_avx_glue.c
      44  unsigned int nbytes;  in ecb_do_crypt() local
      49  while ((nbytes = walk.nbytes) > 0) {  in ecb_do_crypt()
      54  while (nbytes >= SM4_CRYPT8_BLOCK_SIZE) {  in ecb_do_crypt()
      58  nbytes -= SM4_CRYPT8_BLOCK_SIZE;  in ecb_do_crypt()
      60  while (nbytes >= SM4_BLOCK_SIZE) {  in ecb_do_crypt()
      61  unsigned int nblocks = min(nbytes >> 4, 4u);  in ecb_do_crypt()
      65  nbytes -= nblocks * SM4_BLOCK_SIZE;  in ecb_do_crypt()
      69  err = skcipher_walk_done(&walk, nbytes);  in ecb_do_crypt()
      98  unsigned int nbytes;  in sm4_cbc_encrypt() local
     103  while ((nbytes = walk.nbytes) > 0) {  in sm4_cbc_encrypt()
      [all …]
D | des3_ede_glue.c
      70  unsigned int nbytes;  in ecb_crypt() local
      75  while ((nbytes = walk.nbytes)) {  in ecb_crypt()
      80  if (nbytes >= bsize * 3) {  in ecb_crypt()
      87  nbytes -= bsize * 3;  in ecb_crypt()
      88  } while (nbytes >= bsize * 3);  in ecb_crypt()
      90  if (nbytes < bsize)  in ecb_crypt()
     100  nbytes -= bsize;  in ecb_crypt()
     101  } while (nbytes >= bsize);  in ecb_crypt()
     104  err = skcipher_walk_done(&walk, nbytes);  in ecb_crypt()
     130  unsigned int nbytes = walk->nbytes;  in __cbc_encrypt() local
      [all …]
D | aria_gfni_avx512_glue.c
      81  unsigned int nbytes;  in aria_avx512_ctr_encrypt() local
      86  while ((nbytes = walk.nbytes) > 0) {  in aria_avx512_ctr_encrypt()
      90  while (nbytes >= ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE) {  in aria_avx512_ctr_encrypt()
      98  nbytes -= ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE;  in aria_avx512_ctr_encrypt()
     101  while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {  in aria_avx512_ctr_encrypt()
     109  nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;  in aria_avx512_ctr_encrypt()
     112  while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {  in aria_avx512_ctr_encrypt()
     120  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;  in aria_avx512_ctr_encrypt()
     123  while (nbytes >= ARIA_BLOCK_SIZE) {  in aria_avx512_ctr_encrypt()
     135  nbytes -= ARIA_BLOCK_SIZE;  in aria_avx512_ctr_encrypt()
      [all …]
D | aesni-intel_glue.c
     296  unsigned int nbytes;  in ecb_encrypt() local
     301  while ((nbytes = walk.nbytes)) {  in ecb_encrypt()
     304  nbytes & AES_BLOCK_MASK);  in ecb_encrypt()
     306  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_encrypt()
     307  err = skcipher_walk_done(&walk, nbytes);  in ecb_encrypt()
     318  unsigned int nbytes;  in ecb_decrypt() local
     323  while ((nbytes = walk.nbytes)) {  in ecb_decrypt()
     326  nbytes & AES_BLOCK_MASK);  in ecb_decrypt()
     328  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_decrypt()
     329  err = skcipher_walk_done(&walk, nbytes);  in ecb_decrypt()
      [all …]
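For orientation, the ECB entries above all share one loop shape. The following is a simplified composite sketch of that skcipher_walk pattern, not the code of any one listed file; do_blocks() is a hypothetical stand-in for the per-arch assembly routine, and the kernel_fpu_begin()/kernel_fpu_end() or kernel_neon_begin()/kernel_neon_end() bracketing that real drivers add around it is omitted.

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

static int ecb_walk_crypt(struct skcipher_request *req,
			  void (*do_blocks)(const void *ctx, u8 *dst,
					    const u8 *src, unsigned int len))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const void *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		/* Only whole blocks are handed to the cipher... */
		unsigned int len = nbytes & ~(AES_BLOCK_SIZE - 1);

		do_blocks(ctx, walk.dst.virt.addr, walk.src.virt.addr, len);

		/* ...and the sub-block remainder is returned to the walk. */
		err = skcipher_walk_done(&walk, nbytes - len);
	}
	return err;
}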
D | ecb_cbc_helpers.h
      20  while (walk.nbytes > 0) { \
      21  unsigned int nbytes = walk.nbytes; \
      23  nbytes >= __fpu_blocks * __bsize; \
      35  nbytes -= (blocks) * __bsize; \
      44  while (nbytes >= __blocks * __bsize) { \
      52  while (nbytes >= __bsize) { \
      67  while (nbytes >= __blocks * __bsize) { \
      80  err = skcipher_walk_done(&walk, nbytes); \
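These fragments come from the helper macros that most x86 glue files now use instead of open-coding the loop: ECB_WALK_START() sets up the walk and the FPU bracketing, each ECB_BLOCK() drains the remaining nbytes in ever smaller group sizes, and ECB_WALK_END() finishes the walk. A hedged usage sketch follows, assuming those macro names expand as the fragments suggest (including the final return of err); the my_cipher_* routines and the two size constants are hypothetical placeholders, not real kernel symbols.

#include "ecb_cbc_helpers.h"

#define MY_BLOCK_SIZE		16	/* assumed 128-bit block */
#define MY_PARALLEL_BLOCKS	8	/* assumed widest SIMD path */

/* Hypothetical cipher primitives standing in for the real asm routines. */
void my_cipher_ecb_enc_8way(const void *ctx, u8 *dst, const u8 *src);
void my_cipher_enc_blk(const void *ctx, u8 *dst, const u8 *src);

static int my_ecb_encrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, MY_BLOCK_SIZE, MY_PARALLEL_BLOCKS);
	ECB_BLOCK(MY_PARALLEL_BLOCKS, my_cipher_ecb_enc_8way);	/* SIMD path */
	ECB_BLOCK(1, my_cipher_enc_blk);			/* scalar tail */
	ECB_WALK_END();
}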
D | aria_aesni_avx2_glue.c
      94  unsigned int nbytes;  in aria_avx2_ctr_encrypt() local
      99  while ((nbytes = walk.nbytes) > 0) {  in aria_avx2_ctr_encrypt()
     103  while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {  in aria_avx2_ctr_encrypt()
     111  nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;  in aria_avx2_ctr_encrypt()
     114  while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {  in aria_avx2_ctr_encrypt()
     122  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;  in aria_avx2_ctr_encrypt()
     125  while (nbytes >= ARIA_BLOCK_SIZE) {  in aria_avx2_ctr_encrypt()
     136  nbytes -= ARIA_BLOCK_SIZE;  in aria_avx2_ctr_encrypt()
     139  if (walk.nbytes == walk.total && nbytes > 0) {  in aria_avx2_ctr_encrypt()
     148  nbytes);  in aria_avx2_ctr_encrypt()
      [all …]
D | aria_aesni_avx_glue.c
      92  unsigned int nbytes;  in aria_avx_ctr_encrypt() local
      97  while ((nbytes = walk.nbytes) > 0) {  in aria_avx_ctr_encrypt()
     101  while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {  in aria_avx_ctr_encrypt()
     109  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;  in aria_avx_ctr_encrypt()
     112  while (nbytes >= ARIA_BLOCK_SIZE) {  in aria_avx_ctr_encrypt()
     123  nbytes -= ARIA_BLOCK_SIZE;  in aria_avx_ctr_encrypt()
     126  if (walk.nbytes == walk.total && nbytes > 0) {  in aria_avx_ctr_encrypt()
     135  nbytes);  in aria_avx_ctr_encrypt()
     136  dst += nbytes;  in aria_avx_ctr_encrypt()
     137  src += nbytes;  in aria_avx_ctr_encrypt()
      [all …]
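The `walk.nbytes == walk.total && nbytes > 0` branch seen in both ARIA drivers is the CTR tail: the very last chunk of the request may end on a partial block, which is handled by encrypting one counter block and XORing only the bytes that remain. A minimal sketch of that step, assuming a 16-byte block and a hypothetical encrypt_block() primitive; crypto_inc() and crypto_xor_cpy() are the generic helpers from <crypto/algapi.h>.

#include <crypto/algapi.h>
#include <linux/string.h>

#define MY_BLOCK_SIZE 16	/* assumed 128-bit block, as for ARIA/SM4/AES */

static void ctr_final_block(const void *ctx, u8 *dst, const u8 *src,
			    unsigned int nbytes, u8 *iv,
			    void (*encrypt_block)(const void *ctx, u8 *out,
						  const u8 *in))
{
	u8 keystream[MY_BLOCK_SIZE];

	/* Turn the current counter into one block of keystream. */
	memcpy(keystream, iv, MY_BLOCK_SIZE);
	crypto_inc(iv, MY_BLOCK_SIZE);
	encrypt_block(ctx, keystream, keystream);

	/* XOR only the bytes that remain; nothing is written past nbytes. */
	crypto_xor_cpy(dst, src, keystream, nbytes);
}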
/arch/arm64/crypto/
D | sm4-neon-glue.c
      41  unsigned int nbytes;  in sm4_ecb_do_crypt() local
      46  while ((nbytes = walk.nbytes) > 0) {  in sm4_ecb_do_crypt()
      51  nblocks = nbytes / SM4_BLOCK_SIZE;  in sm4_ecb_do_crypt()
      60  err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);  in sm4_ecb_do_crypt()
      87  unsigned int nbytes;  in sm4_cbc_encrypt() local
      92  while ((nbytes = walk.nbytes) > 0) {  in sm4_cbc_encrypt()
      97  while (nbytes >= SM4_BLOCK_SIZE) {  in sm4_cbc_encrypt()
     103  nbytes -= SM4_BLOCK_SIZE;  in sm4_cbc_encrypt()
     108  err = skcipher_walk_done(&walk, nbytes);  in sm4_cbc_encrypt()
     119  unsigned int nbytes;  in sm4_cbc_decrypt() local
      [all …]
D | aes-neonbs-glue.c
     105  while (walk.nbytes >= AES_BLOCK_SIZE) {  in __ecb_crypt()
     106  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in __ecb_crypt()
     108  if (walk.nbytes < walk.total)  in __ecb_crypt()
     117  walk.nbytes - blocks * AES_BLOCK_SIZE);  in __ecb_crypt()
     165  while (walk.nbytes >= AES_BLOCK_SIZE) {  in cbc_encrypt()
     166  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in cbc_encrypt()
     174  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in cbc_encrypt()
     188  while (walk.nbytes >= AES_BLOCK_SIZE) {  in cbc_decrypt()
     189  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in cbc_decrypt()
     191  if (walk.nbytes < walk.total)  in cbc_decrypt()
      [all …]
D | sm4-ce-glue.c
      25  #define BYTES2BLKS(nbytes) ((nbytes) >> 4)  argument
      37  u8 *iv, unsigned int nbytes);
      39  u8 *iv, unsigned int nbytes);
      47  u8 *tweak, unsigned int nbytes,
      50  u8 *tweak, unsigned int nbytes,
     117  unsigned int nbytes;  in sm4_ecb_do_crypt() local
     122  while ((nbytes = walk.nbytes) > 0) {  in sm4_ecb_do_crypt()
     129  nblks = BYTES2BLKS(nbytes);  in sm4_ecb_do_crypt()
     132  nbytes -= nblks * SM4_BLOCK_SIZE;  in sm4_ecb_do_crypt()
     137  err = skcipher_walk_done(&walk, nbytes);  in sm4_ecb_do_crypt()
      [all …]
D | ghash-ce-glue.c
     360  int nbytes = walk.nbytes;  in gcm_encrypt() local
     364  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {  in gcm_encrypt()
     365  src = dst = memcpy(buf + sizeof(buf) - nbytes,  in gcm_encrypt()
     366  src, nbytes);  in gcm_encrypt()
     367  } else if (nbytes < walk.total) {  in gcm_encrypt()
     368  nbytes &= ~(AES_BLOCK_SIZE - 1);  in gcm_encrypt()
     373  pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,  in gcm_encrypt()
     378  if (unlikely(!nbytes))  in gcm_encrypt()
     381  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))  in gcm_encrypt()
     383  buf + sizeof(buf) - nbytes, nbytes);  in gcm_encrypt()
      [all …]
D | aes-glue.c
     189  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_encrypt()
     194  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
     209  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_decrypt()
     214  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
     227  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_encrypt_walk()
     232  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_encrypt_walk()
     256  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_decrypt_walk()
     261  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_decrypt_walk()
     328  ctx->key_enc, rounds, walk.nbytes, walk.iv);  in cts_cbc_encrypt()
     385  ctx->key_dec, rounds, walk.nbytes, walk.iv);  in cts_cbc_decrypt()
      [all …]
/arch/s390/crypto/
D | chacha-glue.c
      22  unsigned int nbytes, const u32 *key,  in chacha20_crypt_s390() argument
      28  chacha20_vx(dst, src, nbytes, key, counter);  in chacha20_crypt_s390()
      31  *counter += round_up(nbytes, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;  in chacha20_crypt_s390()
      40  unsigned int nbytes;  in chacha20_s390() local
      46  while (walk.nbytes > 0) {  in chacha20_s390()
      47  nbytes = walk.nbytes;  in chacha20_s390()
      48  if (nbytes < walk.total)  in chacha20_s390()
      49  nbytes = round_down(nbytes, walk.stride);  in chacha20_s390()
      51  if (nbytes <= CHACHA_BLOCK_SIZE) {  in chacha20_s390()
      53  walk.src.virt.addr, nbytes,  in chacha20_s390()
      [all …]
D | prng.c
     135  static int generate_entropy(u8 *ebuf, size_t nbytes)  in generate_entropy() argument
     160  while (nbytes) {  in generate_entropy()
     171  n = (nbytes < 64) ? nbytes : 64;  in generate_entropy()
     175  nbytes -= n;  in generate_entropy()
     201  static void prng_tdes_seed(int nbytes)  in prng_tdes_seed() argument
     206  BUG_ON(nbytes > sizeof(buf));  in prng_tdes_seed()
     208  get_random_bytes(buf, nbytes);  in prng_tdes_seed()
     211  while (nbytes >= 8) {  in prng_tdes_seed()
     215  nbytes -= 8;  in prng_tdes_seed()
     476  static int prng_sha512_generate(u8 *buf, size_t nbytes)  in prng_sha512_generate() argument
      [all …]
D | des_s390.c
      94  unsigned int nbytes, n;  in ecb_desall_crypt() local
      98  while ((nbytes = walk.nbytes) != 0) {  in ecb_desall_crypt()
     100  n = nbytes & ~(DES_BLOCK_SIZE - 1);  in ecb_desall_crypt()
     103  ret = skcipher_walk_done(&walk, nbytes - n);  in ecb_desall_crypt()
     113  unsigned int nbytes, n;  in cbc_desall_crypt() local
     125  while ((nbytes = walk.nbytes) != 0) {  in cbc_desall_crypt()
     127  n = nbytes & ~(DES_BLOCK_SIZE - 1);  in cbc_desall_crypt()
     131  ret = skcipher_walk_done(&walk, nbytes - n);  in cbc_desall_crypt()
     303  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)  in __ctrblk_init() argument
     308  n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(DES_BLOCK_SIZE - 1);  in __ctrblk_init()
      [all …]
D | aes_s390.c
      69  unsigned int nbytes;  member
     227  unsigned int nbytes, n;  in ecb_aes_crypt() local
     234  while ((nbytes = walk.nbytes) != 0) {  in ecb_aes_crypt()
     236  n = nbytes & ~(AES_BLOCK_SIZE - 1);  in ecb_aes_crypt()
     239  ret = skcipher_walk_done(&walk, nbytes - n);  in ecb_aes_crypt()
     323  unsigned int nbytes, n;  in cbc_aes_crypt() local
     338  while ((nbytes = walk.nbytes) != 0) {  in cbc_aes_crypt()
     340  n = nbytes & ~(AES_BLOCK_SIZE - 1);  in cbc_aes_crypt()
     344  ret = skcipher_walk_done(&walk, nbytes - n);  in cbc_aes_crypt()
     423  unsigned int offset, nbytes, n;  in xts_aes_crypt() local
      [all …]
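The s390 drivers keep the same walk but hand the whole multiple-of-block-size part of each chunk to a single CPACF instruction, then report the remainder back. A sketch of that shape, assuming the cpacf_km() wrapper from <asm/cpacf.h>; fc and param stand in for the function code and key/parameter block that a real driver keeps in its transform context.

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <asm/cpacf.h>

static int ecb_cpacf_crypt(struct skcipher_request *req,
			   unsigned long fc, void *param)
{
	struct skcipher_walk walk;
	unsigned int nbytes, n;
	int ret;

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) != 0) {
		/* Only use complete blocks in this call. */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(fc, param, walk.dst.virt.addr,
			 walk.src.virt.addr, n);
		ret = skcipher_walk_done(&walk, nbytes - n);
	}
	return ret;
}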
/arch/powerpc/crypto/
D | aes-spe-glue.c
     185  unsigned int nbytes;  in ppc_ecb_crypt() local
     190  while ((nbytes = walk.nbytes) != 0) {  in ppc_ecb_crypt()
     191  nbytes = min_t(unsigned int, nbytes, MAX_BYTES);  in ppc_ecb_crypt()
     192  nbytes = round_down(nbytes, AES_BLOCK_SIZE);  in ppc_ecb_crypt()
     197  ctx->key_enc, ctx->rounds, nbytes);  in ppc_ecb_crypt()
     200  ctx->key_dec, ctx->rounds, nbytes);  in ppc_ecb_crypt()
     203  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);  in ppc_ecb_crypt()
     224  unsigned int nbytes;  in ppc_cbc_crypt() local
     229  while ((nbytes = walk.nbytes) != 0) {  in ppc_cbc_crypt()
     230  nbytes = min_t(unsigned int, nbytes, MAX_BYTES);  in ppc_cbc_crypt()
      [all …]
/arch/riscv/crypto/
D | chacha-riscv64-glue.c
      26  unsigned int nbytes;  in riscv64_chacha20_crypt() local
      36  while (walk.nbytes) {  in riscv64_chacha20_crypt()
      37  nbytes = walk.nbytes & ~(CHACHA_BLOCK_SIZE - 1);  in riscv64_chacha20_crypt()
      38  tail_bytes = walk.nbytes & (CHACHA_BLOCK_SIZE - 1);  in riscv64_chacha20_crypt()
      40  if (nbytes) {  in riscv64_chacha20_crypt()
      42  walk.dst.virt.addr, nbytes, iv);  in riscv64_chacha20_crypt()
      43  iv[0] += nbytes / CHACHA_BLOCK_SIZE;  in riscv64_chacha20_crypt()
      45  if (walk.nbytes == walk.total && tail_bytes > 0) {  in riscv64_chacha20_crypt()
      46  memcpy(block_buffer, walk.src.virt.addr + nbytes,  in riscv64_chacha20_crypt()
      50  memcpy(walk.dst.virt.addr + nbytes, block_buffer,  in riscv64_chacha20_crypt()
D | aes-riscv64-glue.c
     140  unsigned int nbytes;  in riscv64_aes_ecb_crypt() local
     144  while ((nbytes = walk.nbytes) != 0) {  in riscv64_aes_ecb_crypt()
     149  nbytes & ~(AES_BLOCK_SIZE - 1));  in riscv64_aes_ecb_crypt()
     153  nbytes & ~(AES_BLOCK_SIZE - 1));  in riscv64_aes_ecb_crypt()
     155  err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1));  in riscv64_aes_ecb_crypt()
     178  unsigned int nbytes;  in riscv64_aes_cbc_crypt() local
     182  while ((nbytes = walk.nbytes) != 0) {  in riscv64_aes_cbc_crypt()
     187  nbytes & ~(AES_BLOCK_SIZE - 1),  in riscv64_aes_cbc_crypt()
     192  nbytes & ~(AES_BLOCK_SIZE - 1),  in riscv64_aes_cbc_crypt()
     195  err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1));  in riscv64_aes_cbc_crypt()
      [all …]
/arch/x86/include/asm/
D | insn.h
      23  unsigned char nbytes;  member
      30  p->nbytes = n;  in insn_field_set()
      49  unsigned char nbytes;  member
      57  p->nbytes = n;  in insn_field_set()
     175  return (insn->vex_prefix.nbytes == 4);  in insn_is_evex()
     185  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */  in insn_vex_m_bits()
     187  else if (insn->vex_prefix.nbytes == 3) /* 3 bytes VEX */  in insn_vex_m_bits()
     195  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */  in insn_vex_p_bits()
     216  return insn->prefixes.nbytes;  in insn_offset_rex_prefix()
     220  return insn_offset_rex_prefix(insn) + insn->rex_prefix.nbytes;  in insn_offset_vex_prefix()
      [all …]
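Outside the crypto code, nbytes here is the x86 instruction decoder's per-field byte count: each struct insn_field records how many bytes it consumed, and the insn_offset_*() helpers above simply add those counts up to locate later fields. A sketch of reading the counts after a decode, assuming the in-kernel decoder's insn_decode() entry point; the printed members follow the fields visible in the listing above.

#include <linux/printk.h>
#include <asm/insn.h>

static int dump_insn_field_lengths(const void *kaddr, int buf_len)
{
	struct insn insn;
	int ret;

	ret = insn_decode(&insn, kaddr, buf_len, INSN_MODE_64);
	if (ret < 0)
		return ret;

	/* Each insn_field remembers how many bytes it occupied. */
	pr_info("prefixes=%u rex=%u vex=%u opcode=%u total=%u\n",
		insn.prefixes.nbytes, insn.rex_prefix.nbytes,
		insn.vex_prefix.nbytes, insn.opcode.nbytes, insn.length);
	return 0;
}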
/arch/sparc/crypto/
D | aes_glue.c
     224  unsigned int nbytes;  in ecb_encrypt() local
     232  while ((nbytes = walk.nbytes) != 0) {  in ecb_encrypt()
     235  round_down(nbytes, AES_BLOCK_SIZE));  in ecb_encrypt()
     236  err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
     248  unsigned int nbytes;  in ecb_decrypt() local
     257  while ((nbytes = walk.nbytes) != 0) {  in ecb_decrypt()
     260  round_down(nbytes, AES_BLOCK_SIZE));  in ecb_decrypt()
     261  err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
     273  unsigned int nbytes;  in cbc_encrypt() local
     281  while ((nbytes = walk.nbytes) != 0) {  in cbc_encrypt()
      [all …]
D | camellia_glue.c
      95  unsigned int nbytes;  in __ecb_crypt() local
     111  while ((nbytes = walk.nbytes) != 0) {  in __ecb_crypt()
     113  round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);  in __ecb_crypt()
     114  err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);  in __ecb_crypt()
     145  unsigned int nbytes;  in cbc_encrypt() local
     158  while ((nbytes = walk.nbytes) != 0) {  in cbc_encrypt()
     160  round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);  in cbc_encrypt()
     161  err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);  in cbc_encrypt()
     174  unsigned int nbytes;  in cbc_decrypt() local
     187  while ((nbytes = walk.nbytes) != 0) {  in cbc_decrypt()
      [all …]
D | des_glue.c
     100  unsigned int nbytes;  in __ecb_crypt() local
     111  while ((nbytes = walk.nbytes) != 0) {  in __ecb_crypt()
     113  round_down(nbytes, DES_BLOCK_SIZE));  in __ecb_crypt()
     114  err = skcipher_walk_done(&walk, nbytes % DES_BLOCK_SIZE);  in __ecb_crypt()
     141  unsigned int nbytes;  in __cbc_crypt() local
     152  while ((nbytes = walk.nbytes) != 0) {  in __cbc_crypt()
     156  round_down(nbytes,  in __cbc_crypt()
     162  round_down(nbytes,  in __cbc_crypt()
     165  err = skcipher_walk_done(&walk, nbytes % DES_BLOCK_SIZE);  in __cbc_crypt()
     250  unsigned int nbytes;  in __ecb3_crypt() local
      [all …]
/arch/arm/crypto/
D | aes-ce-glue.c
     177  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_encrypt()
     182  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
     197  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_decrypt()
     202  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
     215  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_encrypt_walk()
     221  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_encrypt_walk()
     245  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_decrypt_walk()
     251  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_decrypt_walk()
     318  ctx->key_enc, num_rounds(ctx), walk.nbytes,  in cts_cbc_encrypt()
     376  ctx->key_dec, num_rounds(ctx), walk.nbytes,  in cts_cbc_decrypt()
      [all …]
/arch/mips/crypto/
D | chacha-glue.c
      40  while (walk.nbytes > 0) {  in chacha_mips_stream_xor()
      41  unsigned int nbytes = walk.nbytes;  in chacha_mips_stream_xor() local
      43  if (nbytes < walk.total)  in chacha_mips_stream_xor()
      44  nbytes = round_down(nbytes, walk.stride);  in chacha_mips_stream_xor()
      47  nbytes, ctx->nrounds);  in chacha_mips_stream_xor()
      48  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);  in chacha_mips_stream_xor()
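The ChaCha glue here (and in the s390 and riscv64 entries above) follows the stream-cipher variant of the walk: nbytes is rounded down to walk.stride only while more data follows, so a partial block can occur solely on the final call. A simplified composite sketch, with xor_stream() as a hypothetical stand-in for the arch-specific core routine.

#include <crypto/internal/skcipher.h>
#include <linux/math.h>

static int stream_walk_xor(struct skcipher_request *req, void *state,
			   void (*xor_stream)(void *state, u8 *dst,
					      const u8 *src, unsigned int len))
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		/* Not the last chunk: keep the counter on a block boundary. */
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		xor_stream(state, walk.dst.virt.addr,
			   walk.src.virt.addr, nbytes);

		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
	return err;
}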