/crypto/

D | nhpoly1305.c
      97  unsigned int bytes;  in nhpoly1305_units() (local)
     101  bytes = min_t(unsigned int, srclen, NH_MESSAGE_BYTES);  in nhpoly1305_units()
     102  nh_fn(key->nh_key, src, bytes, state->nh_hash);  in nhpoly1305_units()
     103  state->nh_remaining = NH_MESSAGE_BYTES - bytes;  in nhpoly1305_units()
     111  bytes = min(srclen, state->nh_remaining);  in nhpoly1305_units()
     112  nh_fn(&key->nh_key[pos / 4], src, bytes, tmp_hash);  in nhpoly1305_units()
     116  state->nh_remaining -= bytes;  in nhpoly1305_units()
     120  src += bytes;  in nhpoly1305_units()
     121  srclen -= bytes;  in nhpoly1305_units()
     161  unsigned int bytes;  in crypto_nhpoly1305_update_helper() (local)
     [all …]
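
The matches above show the unit-chunking pattern in nhpoly1305_units(): input is consumed in NH_MESSAGE_BYTES-sized units, and nh_remaining carries the unfilled portion of the current unit across calls, with the NH key advanced to match the resume position. A minimal standalone sketch of that loop, assuming simplified stand-ins for nh_fn and the key/state layout (not the kernel's actual definitions):

    #include <stddef.h>
    #include <stdint.h>

    #define NH_MESSAGE_BYTES 1024  /* NH hashes the message in 1024-byte units */

    struct nh_state {
        uint64_t nh_hash[4];        /* NH accumulator for the current unit */
        unsigned int nh_remaining;  /* bytes still unfilled in the current unit */
    };

    /* Stand-in for the real NH compression function. */
    static void nh_fn(const uint32_t *key, const uint8_t *src,
                      unsigned int bytes, uint64_t hash[4])
    {
        (void)key; (void)src; (void)bytes; (void)hash;
    }

    static unsigned int min_uint(unsigned int a, unsigned int b)
    {
        return a < b ? a : b;
    }

    static void nh_units_sketch(struct nh_state *state, const uint32_t *nh_key,
                                const uint8_t *src, unsigned int srclen)
    {
        unsigned int bytes;

        while (srclen) {
            if (state->nh_remaining == 0) {
                /* Start a fresh unit; it may be left partially filled. */
                bytes = min_uint(srclen, NH_MESSAGE_BYTES);
                nh_fn(nh_key, src, bytes, state->nh_hash);
                state->nh_remaining = NH_MESSAGE_BYTES - bytes;
            } else {
                /* Continue a partial unit; the key pointer is advanced to
                 * the resume position (the real code hashes into a tmp_hash
                 * and adds it into nh_hash, which is elided here). */
                unsigned int pos = NH_MESSAGE_BYTES - state->nh_remaining;

                bytes = min_uint(srclen, state->nh_remaining);
                nh_fn(&nh_key[pos / 4], src, bytes, state->nh_hash);
                state->nh_remaining -= bytes;
            }
            src += bytes;
            srclen -= bytes;
        }
    }
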
D | ghash-generic.c
      85  if (dctx->bytes) {  in ghash_update()
      86  int n = min(srclen, dctx->bytes);  in ghash_update()
      87  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_update()
      89  dctx->bytes -= n;  in ghash_update()
      95  if (!dctx->bytes)  in ghash_update()
     107  dctx->bytes = GHASH_BLOCK_SIZE - srclen;  in ghash_update()
     119  if (dctx->bytes) {  in ghash_flush()
     120  u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_flush()
     122  while (dctx->bytes--)  in ghash_flush()
     128  dctx->bytes = 0;  in ghash_flush()
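
In ghash_update(), dctx->bytes counts how many bytes are still needed to complete the current 16-byte block; input is XORed into the digest block until it fills, at which point the GF(2^128) multiply runs. A minimal sketch of that update shape, with the multiply stubbed out (the real one lives in the gf128mul library):

    #include <stddef.h>
    #include <stdint.h>

    #define GHASH_BLOCK_SIZE 16

    struct ghash_desc {
        uint8_t buffer[GHASH_BLOCK_SIZE]; /* running digest block */
        unsigned int bytes;               /* bytes still missing from the block */
    };

    /* Stand-in for the real GF(2^128) multiply by the hash key. */
    static void gf128_mul(uint8_t block[GHASH_BLOCK_SIZE]) { (void)block; }

    static void ghash_update_sketch(struct ghash_desc *dctx,
                                    const uint8_t *src, unsigned int srclen)
    {
        uint8_t *dst = dctx->buffer;

        if (dctx->bytes) {
            /* Finish the previously started block first. */
            unsigned int n = srclen < dctx->bytes ? srclen : dctx->bytes;
            uint8_t *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

            dctx->bytes -= n;
            srclen -= n;
            while (n--)
                *pos++ ^= *src++;
            if (!dctx->bytes)
                gf128_mul(dst);
        }

        while (srclen >= GHASH_BLOCK_SIZE) {
            for (int i = 0; i < GHASH_BLOCK_SIZE; i++)
                dst[i] ^= src[i];
            gf128_mul(dst);
            src += GHASH_BLOCK_SIZE;
            srclen -= GHASH_BLOCK_SIZE;
        }

        if (srclen) {
            /* Remember how much of the next block is still outstanding. */
            dctx->bytes = GHASH_BLOCK_SIZE - srclen;
            while (srclen--)
                *dst++ ^= *src++;
        }
    }

ghash_flush() then pads the leftover with zeros (equivalently, multiplies the partially XORed block once dctx->bytes is drained) before the final digest is read out.
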
D | aegis128-core.c
      95  crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);  in crypto_aegis128_update_u()
     106  crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);  in crypto_aegis128_init()
     185  crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);  in crypto_aegis128_encrypt_chunk()
     189  memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);  in crypto_aegis128_encrypt_chunk()
     199  memcpy(msg.bytes, src, size);  in crypto_aegis128_encrypt_chunk()
     210  memcpy(dst, msg.bytes, size);  in crypto_aegis128_encrypt_chunk()
     246  crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);  in crypto_aegis128_decrypt_chunk()
     250  memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);  in crypto_aegis128_decrypt_chunk()
     260  memcpy(msg.bytes, src, size);  in crypto_aegis128_decrypt_chunk()
     268  memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);  in crypto_aegis128_decrypt_chunk()
     [all …]
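
The encrypt/decrypt chunk matches show a full-block/tail split: whole AEGIS_BLOCK_SIZE blocks are XORed with the keystream in place, and a short tail is copied into a temporary block (zero-padded, per line 268 on the decrypt side) so the state update always sees a full block. A heavily simplified structural sketch; the real AEGIS-128 state is five blocks and the keystream derivation is not the identity used here:

    #include <string.h>
    #include <stdint.h>

    #define AEGIS_BLOCK_SIZE 16

    union aegis_block {
        uint8_t bytes[AEGIS_BLOCK_SIZE];
    };

    /* Stand-in for the real AEGIS round/state update. */
    static void aegis_update(union aegis_block *state, const uint8_t *msg)
    { (void)state; (void)msg; }

    static void encrypt_chunk_sketch(union aegis_block *state,
                                     uint8_t *dst, const uint8_t *src, size_t size)
    {
        while (size >= AEGIS_BLOCK_SIZE) {
            union aegis_block tmp = *state;   /* placeholder keystream block */

            for (int i = 0; i < AEGIS_BLOCK_SIZE; i++)
                tmp.bytes[i] ^= src[i];       /* crypto_xor(tmp.bytes, src, ...) */
            aegis_update(state, src);
            memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);
            src += AEGIS_BLOCK_SIZE;
            dst += AEGIS_BLOCK_SIZE;
            size -= AEGIS_BLOCK_SIZE;
        }

        if (size > 0) {
            union aegis_block msg = {0};      /* tail, zero-padded to a block */

            memcpy(msg.bytes, src, size);
            for (int i = 0; i < AEGIS_BLOCK_SIZE; i++)
                msg.bytes[i] ^= state->bytes[i];
            aegis_update(state, msg.bytes);
            memcpy(dst, msg.bytes, size);     /* emit only `size` output bytes */
        }
    }
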
D | polyval-generic.c
      66  u32 bytes;  (member)
     161  if (dctx->bytes) {  in polyval_update()
     162  n = min(srclen, dctx->bytes);  in polyval_update()
     163  pos = dctx->buffer + dctx->bytes - 1;  in polyval_update()
     165  dctx->bytes -= n;  in polyval_update()
     171  if (!dctx->bytes)  in polyval_update()
     184  dctx->bytes = POLYVAL_BLOCK_SIZE - srclen;  in polyval_update()
     198  if (dctx->bytes)  in polyval_final()
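
polyval_update() follows the same partial-block bookkeeping as ghash_update(), with one telling difference visible at line 163: the partial block is filled from the end of the buffer backwards (pos = buffer + bytes - 1), because POLYVAL is the byte-reversed variant of GHASH. A minimal sketch of just that path, with the field multiply stubbed:

    #include <stdint.h>

    #define POLYVAL_BLOCK_SIZE 16

    struct polyval_desc {
        uint8_t buffer[POLYVAL_BLOCK_SIZE];
        uint32_t bytes;   /* bytes still needed to complete the block */
    };

    /* Stand-in for the POLYVAL field multiply. */
    static void polyval_mul(uint8_t block[POLYVAL_BLOCK_SIZE]) { (void)block; }

    static void polyval_partial_sketch(struct polyval_desc *dctx,
                                       const uint8_t *src, unsigned int srclen)
    {
        if (dctx->bytes) {
            unsigned int n = srclen < dctx->bytes ? srclen : dctx->bytes;
            /* Fill backwards: POLYVAL stores the block byte-reversed. */
            uint8_t *pos = dctx->buffer + dctx->bytes - 1;

            dctx->bytes -= n;
            while (n--)
                *pos-- ^= *src++;
            if (!dctx->bytes)
                polyval_mul(dctx->buffer);
        }
        /* Full blocks and the trailing partial block (which sets
         * dctx->bytes = POLYVAL_BLOCK_SIZE - srclen) follow the same shape
         * as the GHASH sketch above and are omitted here. */
    }
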
D | poly1305_generic.c
      76  unsigned int bytes;  in crypto_poly1305_update() (local)
      79  bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);  in crypto_poly1305_update()
      80  memcpy(dctx->buf + dctx->buflen, src, bytes);  in crypto_poly1305_update()
      81  src += bytes;  in crypto_poly1305_update()
      82  srclen -= bytes;  in crypto_poly1305_update()
      83  dctx->buflen += bytes;  in crypto_poly1305_update()
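
This is the classic streaming-hash buffering step: top up the 16-byte block buffer with min(srclen, space-left) bytes, run the block function once the buffer fills, then process whole blocks directly and stash any tail. A minimal sketch, with poly1305_blocks() as a stand-in for the real core routine:

    #include <string.h>
    #include <stdint.h>

    #define POLY1305_BLOCK_SIZE 16

    struct poly1305_desc {
        uint8_t buf[POLY1305_BLOCK_SIZE];
        unsigned int buflen;   /* bytes currently buffered */
    };

    /* Stand-in for the real Poly1305 block function. */
    static void poly1305_blocks(struct poly1305_desc *d,
                                const uint8_t *src, unsigned int len)
    { (void)d; (void)src; (void)len; }

    static void poly1305_update_sketch(struct poly1305_desc *dctx,
                                       const uint8_t *src, unsigned int srclen)
    {
        unsigned int bytes;

        if (dctx->buflen) {
            bytes = srclen < POLY1305_BLOCK_SIZE - dctx->buflen ?
                    srclen : POLY1305_BLOCK_SIZE - dctx->buflen;
            memcpy(dctx->buf + dctx->buflen, src, bytes);
            src += bytes;
            srclen -= bytes;
            dctx->buflen += bytes;

            if (dctx->buflen == POLY1305_BLOCK_SIZE) {
                poly1305_blocks(dctx, dctx->buf, POLY1305_BLOCK_SIZE);
                dctx->buflen = 0;
            }
        }

        if (srclen >= POLY1305_BLOCK_SIZE) {
            unsigned int whole = srclen & ~(POLY1305_BLOCK_SIZE - 1);

            poly1305_blocks(dctx, src, whole);
            src += whole;
            srclen -= whole;
        }

        if (srclen) {
            memcpy(dctx->buf, src, srclen); /* keep the tail for later */
            dctx->buflen = srclen;
        }
    }
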
D | xor.c
      27  xor_blocks(unsigned int src_count, unsigned int bytes, void *dest, void **srcs)  in xor_blocks() (argument)
      33  active_template->do_2(bytes, dest, p1);  in xor_blocks()
      39  active_template->do_3(bytes, dest, p1, p2);  in xor_blocks()
      45  active_template->do_4(bytes, dest, p1, p2, p3);  in xor_blocks()
      50  active_template->do_5(bytes, dest, p1, p2, p3, p4);  in xor_blocks()
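
xor_blocks() dispatches on the source count to fixed-arity routines (do_2 .. do_5) supplied by the active XOR template. A minimal runnable sketch of that dispatch; the template struct, the byte-wise fallback routines, and the batching comment are simplifications (the kernel benchmarks several word/SIMD templates at boot and installs the fastest):

    #include <stddef.h>

    struct xor_template {
        void (*do_2)(unsigned int, void *, void *);
        void (*do_3)(unsigned int, void *, void *, void *);
        void (*do_4)(unsigned int, void *, void *, void *, void *);
        void (*do_5)(unsigned int, void *, void *, void *, void *, void *);
    };

    static void do_2(unsigned int bytes, void *dest, void *a)
    {
        unsigned char *d = dest, *s = a;

        while (bytes--)
            *d++ ^= *s++;
    }
    static void do_3(unsigned int bytes, void *dest, void *a, void *b)
    { do_2(bytes, dest, a); do_2(bytes, dest, b); }
    static void do_4(unsigned int bytes, void *dest, void *a, void *b, void *c)
    { do_3(bytes, dest, a, b); do_2(bytes, dest, c); }
    static void do_5(unsigned int bytes, void *dest, void *a, void *b, void *c,
                     void *e)
    { do_4(bytes, dest, a, b, c); do_2(bytes, dest, e); }

    static struct xor_template generic = { do_2, do_3, do_4, do_5 };
    static struct xor_template *active_template = &generic;

    void xor_blocks_sketch(unsigned int src_count, unsigned int bytes,
                           void *dest, void **srcs)
    {
        void *p1 = srcs[0], *p2, *p3;

        if (src_count == 1) {
            active_template->do_2(bytes, dest, p1);
            return;
        }
        p2 = srcs[1];
        if (src_count == 2) {
            active_template->do_3(bytes, dest, p1, p2);
            return;
        }
        p3 = srcs[2];
        if (src_count == 3) {
            active_template->do_4(bytes, dest, p1, p2, p3);
            return;
        }
        /* Four sources at a time; callers batch larger sets. */
        active_template->do_5(bytes, dest, p1, p2, p3, srcs[3]);
    }
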
D | aegis.h
      21  u8 bytes[AEGIS_BLOCK_SIZE];  (member)
      64  const u8 *s = src->bytes;  in crypto_aegis_aesenc()
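
The `bytes` member here is one view of the AEGIS block union: keeping each 128-bit block accessible both as raw bytes (for crypto_xor()/memcpy()) and as words (for the AES round function). A short sketch of the likely shape; the word-view member names and types are assumptions for illustration:

    #include <stdint.h>

    #define AEGIS_BLOCK_SIZE 16

    union aegis_block {
        uint8_t  bytes[AEGIS_BLOCK_SIZE];
        uint32_t words32[AEGIS_BLOCK_SIZE / sizeof(uint32_t)];
        uint64_t words64[AEGIS_BLOCK_SIZE / sizeof(uint64_t)];
    };
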
D | adiantum.c
      88  u8 bytes[XCHACHA_IV_SIZE];  (member)
     293  crypto_cipher_decrypt_one(tctx->blockcipher, rctx->rbuf.bytes,  in adiantum_finish()
     294      rctx->rbuf.bytes);  in adiantum_finish()
     353  crypto_cipher_encrypt_one(tctx->blockcipher, rctx->rbuf.bytes,  in adiantum_crypt()
     354      rctx->rbuf.bytes);  in adiantum_crypt()

D | vmac.c
      69  u8 bytes[VMAC_NONCEBYTES];  (member)
     501  memcpy(&dctx->nonce.bytes[dctx->nonce_size], p, n);  in vmac_update()
     578  if (dctx->nonce.bytes[0] & 0x80)  in vmac_final()
     586  index = dctx->nonce.bytes[VMAC_NONCEBYTES - 1] & 1;  in vmac_final()
     587  dctx->nonce.bytes[VMAC_NONCEBYTES - 1] &= ~1;  in vmac_final()
     588  crypto_cipher_encrypt_one(tctx->cipher, dctx->nonce.bytes,  in vmac_final()
     589      dctx->nonce.bytes);  in vmac_final()
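
The vmac_final() matches show the nonce-to-pad derivation: the check on bytes[0] & 0x80 enforces the VMAC requirement that the nonce stay below 2^127, and the low bit of the last nonce byte selects which 64-bit half of the encrypted nonce block becomes the pad, after being cleared so both selector values encrypt the same block. A minimal sketch, with the cipher call as a stand-in for the real AES invocation:

    #include <string.h>
    #include <stdint.h>

    #define VMAC_NONCEBYTES 16

    union vmac_nonce {
        uint8_t  bytes[VMAC_NONCEBYTES];
        uint64_t pads[VMAC_NONCEBYTES / 8];
    };

    /* Stand-in for crypto_cipher_encrypt_one() on the underlying cipher. */
    static void cipher_encrypt_block(uint8_t *dst, const uint8_t *src)
    { memcpy(dst, src, VMAC_NONCEBYTES); }

    static uint64_t vmac_nonce_pad_sketch(union vmac_nonce *nonce)
    {
        unsigned int index = nonce->bytes[VMAC_NONCEBYTES - 1] & 1;

        nonce->bytes[VMAC_NONCEBYTES - 1] &= ~1;  /* clear the selector bit */
        cipher_encrypt_block(nonce->bytes, nonce->bytes);
        return nonce->pads[index];                /* selected 64-bit half */
    }
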
D | af_alg.c
     535  unsigned int af_alg_count_tsgl(struct sock *sk, size_t bytes, size_t offset)  in af_alg_count_tsgl() (argument)
     543  if (!bytes)  in af_alg_count_tsgl()
     555  bytes -= sg[i].length;  in af_alg_count_tsgl()
     565  if (bytes_count >= bytes)  in af_alg_count_tsgl()
     568  bytes -= bytes_count;  in af_alg_count_tsgl()
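
af_alg_count_tsgl() walks the socket's TX scatter-gather list and counts how many entries are needed to cover `bytes` of payload after skipping an initial `offset`. A minimal sketch of that shape, with a plain array standing in for the kernel scatterlist and all names invented for illustration:

    #include <stddef.h>

    struct seg { size_t length; };

    static unsigned int count_segs_sketch(const struct seg *sg,
                                          unsigned int nsegs,
                                          size_t bytes, size_t offset)
    {
        unsigned int sgl_count = 0;

        if (!bytes)
            return 0;

        for (unsigned int i = 0; i < nsegs && bytes; i++) {
            size_t avail = sg[i].length;

            /* Consume the starting offset before counting coverage. */
            if (offset >= avail) {
                offset -= avail;
                continue;
            }
            avail -= offset;
            offset = 0;

            sgl_count++;
            if (avail >= bytes)
                break;          /* this entry covers the remainder */
            bytes -= avail;
        }
        return sgl_count;
    }
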
D | Kconfig
     435  256 bytes each, and attempts to eliminate data dependent latencies by
     840  multiple of 16 bytes.
     969  of any size between 1 and 64 bytes. The keyed hash is also implemented.

/crypto/async_tx/

D | async_raid6_recov.c
     153  __2data_recov_4(int disks, size_t bytes, int faila, int failb,  in __2data_recov_4() (argument)
     187  tx = async_sum_product(b, b_off, srcs, src_offs, coef, bytes, submit);  in __2data_recov_4()
     196  tx = async_xor_offs(a, a_off, srcs, src_offs, 2, bytes, submit);  in __2data_recov_4()
     203  __2data_recov_5(int disks, size_t bytes, int faila, int failb,  in __2data_recov_5() (argument)
     248  tx = async_memcpy(dp, g, dp_off, g_off, bytes, submit);  in __2data_recov_5()
     251      raid6_gfexp[good], bytes, submit);  in __2data_recov_5()
     260  tx = async_xor_offs(dp, dp_off, srcs, src_offs, 2, bytes, submit);  in __2data_recov_5()
     269  tx = async_xor_offs(dq, dq_off, srcs, src_offs, 2, bytes, submit);  in __2data_recov_5()
     279  tx = async_sum_product(dq, dq_off, srcs, src_offs, coef, bytes, submit);  in __2data_recov_5()
     288  tx = async_xor_offs(dp, dp_off, srcs, src_offs, 2, bytes, submit);  in __2data_recov_5()
     [all …]
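
These async_sum_product()/async_xor_offs() chains implement standard RAID6 two-data-disk recovery as dependency-ordered, DMA-offloadable steps. A minimal synchronous sketch of the underlying GF(256) math, assuming the P and Q buffers have already been XOR-reduced against the surviving data (so pxy = Da ^ Db and qxy = g^faila*Da ^ g^failb*Db); gf_mul and the coefficient formulas follow the standard RAID6 construction, not the kernel's table-driven code:

    #include <stdint.h>
    #include <stddef.h>

    /* GF(256) multiply, RAID6 polynomial x^8+x^4+x^3+x^2+1 (0x11d). */
    static uint8_t gf_mul(uint8_t a, uint8_t b)
    {
        uint8_t p = 0;

        while (b) {
            if (b & 1)
                p ^= a;
            a = (a << 1) ^ ((a & 0x80) ? 0x1d : 0);
            b >>= 1;
        }
        return p;
    }

    static uint8_t gf_exp(uint8_t base, unsigned int e)  /* base^e */
    {
        uint8_t r = 1;

        while (e--)
            r = gf_mul(r, base);
        return r;
    }

    static uint8_t gf_inv(uint8_t a)  /* a^254 == a^-1 for a != 0 */
    {
        return gf_exp(a, 254);
    }

    /* Recover Da, Db (failed indices faila < failb) in place from the
     * reduced syndromes pxy and qxy. */
    static void raid6_2data_sketch(size_t bytes, int faila, int failb,
                                   uint8_t *pxy, uint8_t *qxy)
    {
        uint8_t pbmul = gf_inv(gf_exp(2, failb - faila) ^ 1);
        uint8_t qmul  = gf_inv(gf_exp(2, faila) ^ gf_exp(2, failb));

        for (size_t i = 0; i < bytes; i++) {
            uint8_t db = gf_mul(pbmul, pxy[i]) ^ gf_mul(qmul, qxy[i]);
            uint8_t da = db ^ pxy[i];

            pxy[i] = da;   /* Da replaces the reduced P buffer */
            qxy[i] = db;   /* Db replaces the reduced Q buffer */
        }
    }

The kernel avoids the bit-by-bit multiply by precomputing 256-byte lookup tables (raid6_gfmul, raid6_gfexp, seen at line 251 above) and driving them through the async offload engine.
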
D | raid6test.c
      57  static void raid6_dual_recov(int disks, size_t bytes, int faila, int failb,  in raid6_dual_recov() (argument)
      73      disks, bytes, &submit);  in raid6_dual_recov()
      93  tx = async_xor(dest, blocks, 0, count, bytes, &submit);  in raid6_dual_recov()
      97      disks, bytes, &submit);  in raid6_dual_recov()
     103  tx = async_raid6_datap_recov(disks, bytes,  in raid6_dual_recov()
     108  tx = async_raid6_2data_recov(disks, bytes,  in raid6_dual_recov()
     115      disks, bytes, &result, spare, 0, &submit);  in raid6_dual_recov()