/crypto/async_tx/
async_xor.c
     79  xor_src_cnt, unmap->len,  in do_async_xor()
     90  xor_src_cnt, unmap->len,  in do_async_xor()
    114  int src_cnt, size_t len, struct async_submit_ctl *submit)  in do_sync_xor() (argument)
    136  memset(dest_buf, 0, len);  in do_sync_xor()
    141  xor_blocks(xor_src_cnt, len, dest_buf, &srcs[src_off]);  in do_sync_xor()
    174  int src_cnt, size_t len, struct async_submit_ctl *submit)  in async_xor() (argument)
    178  src_cnt, len);  in async_xor()
    187  if (unmap && is_dma_xor_aligned(device, offset, 0, len)) {  in async_xor()
    192  pr_debug("%s (async): len: %zu\n", __func__, len);  in async_xor()
    194  unmap->len = len;  in async_xor()
    [all …]

async_pq.c
     96  &scfs[src_off], unmap->len,  in do_async_gen_syndrome()
    123  size_t len, struct async_submit_ctl *submit)  in do_sync_gen_syndrome() (argument)
    140  raid6_call.gen_syndrome(disks, len, srcs);  in do_sync_gen_syndrome()
    167  size_t len, struct async_submit_ctl *submit)  in async_gen_syndrome() (argument)
    172  blocks, src_cnt, len);  in async_gen_syndrome()
    184  is_dma_pq_aligned(device, offset, 0, len)) {  in async_gen_syndrome()
    192  __func__, disks, len);  in async_gen_syndrome()
    197  unmap->len = len;  in async_gen_syndrome()
    202  len, DMA_TO_DEVICE);  in async_gen_syndrome()
    215  offset, len, DMA_BIDIRECTIONAL);  in async_gen_syndrome()
    [all …]

async_memcpy.c
     46  unsigned int src_offset, size_t len,  in async_memcpy() (argument)
     50  &dest, 1, &src, 1, len);  in async_memcpy()
     58  if (unmap && is_dma_copy_aligned(device, src_offset, dest_offset, len)) {  in async_memcpy()
     67  unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len,  in async_memcpy()
     70  unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len,  in async_memcpy()
     72  unmap->len = len;  in async_memcpy()
     75  unmap->addr[0], len,  in async_memcpy()
     80  pr_debug("%s: (async) len: %zu\n", __func__, len);  in async_memcpy()
     86  pr_debug("%s: (sync) len: %zu\n", __func__, len);  in async_memcpy()
     94  memcpy(dest_buf, src_buf, len);  in async_memcpy()

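The matches above trace a single control-flow pattern shared across async_tx: check whether a DMA engine can take the job given the buffer alignment (the is_dma_copy_aligned()/is_dma_xor_aligned() tests), submit an asynchronous descriptor if so, and otherwise fall back to a synchronous CPU path (the memcpy()/xor_blocks() calls). A minimal userspace sketch of that shape, with hypothetical hw_copy_aligned() and hw_copy_submit() helpers standing in for the dmaengine calls:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Hypothetical stand-ins for the dmaengine helpers referenced above. */
    static int hw_copy_aligned(const void *dst, const void *src, size_t len)
    {
        /* pretend the engine requires 8-byte alignment on everything */
        return (((uintptr_t)dst | (uintptr_t)src | len) & 7) == 0;
    }

    static void hw_copy_submit(void *dst, const void *src, size_t len)
    {
        /* a real engine would queue a descriptor and complete later;
         * the copy is done inline here only to keep the sketch runnable */
        memcpy(dst, src, len);
    }

    /* The shape of async_memcpy(): offload when alignment permits,
     * otherwise do the work synchronously on the CPU. */
    static void copy_async_or_sync(void *dst, const void *src, size_t len)
    {
        if (hw_copy_aligned(dst, src, len)) {
            printf("(async) len: %zu\n", len);
            hw_copy_submit(dst, src, len);
        } else {
            printf("(sync) len: %zu\n", len);
            memcpy(dst, src, len);
        }
    }

    int main(void)
    {
        char src[16] = "hello, dma", dst[16];

        copy_async_or_sync(dst, src, sizeof(src));
        printf("%s\n", dst);
        return 0;
    }
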
async_raid6_recov.c
     33  size_t len, struct async_submit_ctl *submit)  in async_sum_product() (argument)
     36  &dest, 1, srcs, 2, len);  in async_sum_product()
     54  unmap->addr[0] = dma_map_page(dev, srcs[0], 0, len, DMA_TO_DEVICE);  in async_sum_product()
     55  unmap->addr[1] = dma_map_page(dev, srcs[1], 0, len, DMA_TO_DEVICE);  in async_sum_product()
     58  unmap->addr[2] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL);  in async_sum_product()
     63  unmap->len = len;  in async_sum_product()
     65  len, dma_flags);  in async_sum_product()
     87  while (len--) {  in async_sum_product()
     97  async_mult(struct page *dest, struct page *src, u8 coef, size_t len,  in async_mult() (argument)
    101  &dest, 1, &src, 1, len);  in async_mult()
    [all …]

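The `while (len--)` loop at line 87 is the synchronous fallback of async_sum_product(), which walks the source buffers applying coefficient multiplications in GF(2^8). RAID-6 arithmetic uses the field reduced by the polynomial 0x11d; the kernel reads precomputed raid6_gfmul tables, but the underlying multiply looks like this bitwise sketch:

    #include <stdint.h>
    #include <stdio.h>

    /* GF(2^8) multiply for the RAID-6 field (polynomial x^8+x^4+x^3+x^2+1,
     * i.e. 0x11d). The kernel's recovery paths use precomputed tables
     * (raid6_gfmul); this bitwise version is just the underlying math. */
    static uint8_t gf256_mul(uint8_t a, uint8_t b)
    {
        uint8_t p = 0;

        while (b) {
            if (b & 1)
                p ^= a;
            /* multiply a by x, reducing modulo the field polynomial */
            a = (uint8_t)((a << 1) ^ ((a & 0x80) ? 0x1d : 0));
            b >>= 1;
        }
        return p;
    }

    int main(void)
    {
        /* 0x02 * 0x80 wraps around the polynomial: prints 0x1d */
        printf("%#x\n", (unsigned)gf256_mul(0x02, 0x80));
        return 0;
    }
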
/crypto/
drbg.c
    300  size_t len = addlen;  in drbg_add_buf() (local)
    304  while (len) {  in drbg_add_buf()
    308  len--; dstptr--; addptr--;  in drbg_add_buf()
    310  len = dstlen - addlen;  in drbg_add_buf()
    311  while (len && remainder > 0) {  in drbg_add_buf()
    315  len--; dstptr--;  in drbg_add_buf()
    349  size_t len = curr->len;  in drbg_ctr_bcc() (local)
    351  while (len) {  in drbg_ctr_bcc()
    362  len--;  in drbg_ctr_bcc()
    451  inputlen += seed->len;  in drbg_ctr_df()
    [all …]

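For context on the drbg_add_buf() matches: the function performs a byte-wise big-endian addition, first adding the shorter buffer into the tail of the destination (lines 304-308), then propagating the carry through the remaining bytes (lines 310-315). A standalone sketch of that two-loop structure, assuming as the kernel does that the destination is at least as long as the addend:

    #include <stdio.h>

    /* Byte-wise big-endian add, mirroring drbg_add_buf(): add `add`
     * into the tail of `dst`, then ripple the carry leftward.
     * Assumes dstlen >= addlen. */
    static void be_add(unsigned char *dst, size_t dstlen,
                       const unsigned char *add, size_t addlen)
    {
        unsigned char *dstptr = dst + dstlen;
        const unsigned char *addptr = add + addlen;
        unsigned int carry = 0;
        size_t len = addlen;

        while (len) {                   /* add region: tail of dst */
            carry += *--dstptr + *--addptr;
            *dstptr = (unsigned char)carry;
            carry >>= 8;
            len--;
        }
        len = dstlen - addlen;
        while (len && carry) {          /* ripple the remaining carry */
            carry += *--dstptr;
            *dstptr = (unsigned char)carry;
            carry >>= 8;
            len--;
        }
    }

    int main(void)
    {
        unsigned char dst[3] = { 0x00, 0xff, 0xff };
        const unsigned char one[1] = { 0x01 };

        be_add(dst, sizeof(dst), one, sizeof(one));
        printf("%02x %02x %02x\n", dst[0], dst[1], dst[2]); /* 01 00 00 */
        return 0;
    }
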
xcbc.c
     57  unsigned int len;  (struct member)
     89  ctx->len = 0;  in crypto_xcbc_digest_init()
     96  unsigned int len)  in crypto_xcbc_digest_update() (argument)
    108  if ((ctx->len + len) <= bs) {  in crypto_xcbc_digest_update()
    109  memcpy(odds + ctx->len, p, len);  in crypto_xcbc_digest_update()
    110  ctx->len += len;  in crypto_xcbc_digest_update()
    115  memcpy(odds + ctx->len, p, bs - ctx->len);  in crypto_xcbc_digest_update()
    116  len -= bs - ctx->len;  in crypto_xcbc_digest_update()
    117  p += bs - ctx->len;  in crypto_xcbc_digest_update()
    123  ctx->len = 0;  in crypto_xcbc_digest_update()
    [all …]

cmac.c
     50  unsigned int len;  (struct member)
    115  ctx->len = 0;  in crypto_cmac_digest_init()
    122  unsigned int len)  in crypto_cmac_digest_update() (argument)
    134  if ((ctx->len + len) <= bs) {  in crypto_cmac_digest_update()
    135  memcpy(odds + ctx->len, p, len);  in crypto_cmac_digest_update()
    136  ctx->len += len;  in crypto_cmac_digest_update()
    141  memcpy(odds + ctx->len, p, bs - ctx->len);  in crypto_cmac_digest_update()
    142  len -= bs - ctx->len;  in crypto_cmac_digest_update()
    143  p += bs - ctx->len;  in crypto_cmac_digest_update()
    149  ctx->len = 0;  in crypto_cmac_digest_update()
    [all …]

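xcbc.c and cmac.c share the same update-side buffering, and md4.c, md5.c, and sha1_generic.c below follow the same shape with their own block sizes: accumulate input until a full block is available, feed whole blocks to the block function, and buffer the tail. The update loop keeps at least the last full block in the buffer so that the final() step always has data to pad and finish. A simplified sketch with a stub in place of crypto_cipher_encrypt_one():

    #include <stdint.h>
    #include <string.h>

    #define BS 16  /* AES block size, as in cmac(aes)/xcbc(aes) */

    struct mac_ctx {
        uint8_t odds[BS];   /* buffered partial block */
        unsigned int len;   /* bytes currently buffered */
        uint8_t prev[BS];   /* running CBC-MAC state */
    };

    /* Stub standing in for crypto_cipher_encrypt_one(): fold a block
     * into the state; real code would also encrypt ctx->prev. */
    static void mac_block(struct mac_ctx *ctx, const uint8_t *block)
    {
        for (int i = 0; i < BS; i++)
            ctx->prev[i] ^= block[i];
    }

    /* The buffering shape of crypto_cmac_digest_update() and
     * crypto_xcbc_digest_update(). */
    static void mac_update(struct mac_ctx *ctx, const uint8_t *p,
                           unsigned int len)
    {
        /* not enough for a full block yet: just accumulate */
        if (ctx->len + len <= BS) {
            memcpy(ctx->odds + ctx->len, p, len);
            ctx->len += len;
            return;
        }

        /* top up the buffered block and consume it */
        memcpy(ctx->odds + ctx->len, p, BS - ctx->len);
        len -= BS - ctx->len;
        p += BS - ctx->len;
        mac_block(ctx, ctx->odds);
        ctx->len = 0;

        /* whole blocks straight from the input; "> BS" (not ">=")
         * keeps the last block buffered for final() */
        while (len > BS) {
            mac_block(ctx, p);
            p += BS;
            len -= BS;
        }

        /* stash the remainder (1..BS bytes) */
        memcpy(ctx->odds, p, len);
        ctx->len = len;
    }

    int main(void)
    {
        struct mac_ctx ctx = { .len = 0 };
        uint8_t msg[40] = { 0 };

        mac_update(&ctx, msg, sizeof(msg));
        return 0;
    }
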
crc32.c
     42  static u32 __crc32_le(u32 crc, unsigned char const *p, size_t len)  in __crc32_le() (argument)
     44  return crc32_le(crc, p, len);  in __crc32_le()
     87  unsigned int len)  in crc32_update() (argument)
     91  *crcp = __crc32_le(*crcp, data, len);  in crc32_update()
     96  static int __crc32_finup(u32 *crcp, const u8 *data, unsigned int len,  in __crc32_finup() (argument)
     99  *(__le32 *)out = cpu_to_le32(__crc32_le(*crcp, data, len));  in __crc32_finup()
    104  unsigned int len, u8 *out)  in crc32_finup() (argument)
    106  return __crc32_finup(shash_desc_ctx(desc), data, len, out);  in crc32_finup()
    118  unsigned int len, u8 *out)  in crc32_digest() (argument)
    120  return __crc32_finup(crypto_shash_ctx(desc->tfm), data, len,  in crc32_digest()

michael_mic.c
     63  unsigned int len)  in michael_update() (argument)
     70  if (flen > len)  in michael_update()
     71  flen = len;  in michael_update()
     75  len -= flen;  in michael_update()
     88  while (len >= 4) {  in michael_update()
     91  len -= 4;  in michael_update()
     94  if (len > 0) {  in michael_update()
     95  mctx->pending_len = len;  in michael_update()
     96  memcpy(mctx->pending, src, len);  in michael_update()

md5.c
     63  static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)  in md5_update() (argument)
     68  mctx->byte_count += len;  in md5_update()
     70  if (avail > len) {  in md5_update()
     72  data, len);  in md5_update()
     81  len -= avail;  in md5_update()
     83  while (len >= sizeof(mctx->block)) {  in md5_update()
     87  len -= sizeof(mctx->block);  in md5_update()
     90  memcpy(mctx->block, data, len);  in md5_update()

shash.c
     68  static inline unsigned int shash_align_buffer_size(unsigned len,  in shash_align_buffer_size() (argument)
     72  return len + (mask & ~(__alignof__(u8_aligned) - 1));  in shash_align_buffer_size()
     76  unsigned int len)  in shash_update_unaligned() (argument)
     88  if (unaligned_len > len)  in shash_update_unaligned()
     89  unaligned_len = len;  in shash_update_unaligned()
     96  shash->update(desc, data + unaligned_len, len - unaligned_len);  in shash_update_unaligned()
    100  unsigned int len)  in crypto_shash_update() (argument)
    107  return shash_update_unaligned(desc, data, len);  in crypto_shash_update()
    109  return shash->update(desc, data, len);  in crypto_shash_update()
    149  unsigned int len, u8 *out)  in shash_finup_unaligned() (argument)
    [all …]

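shash_update_unaligned() exists because crypto_shash_update() compares the data pointer against the algorithm's alignmask (line 107 vs. 109): a misaligned prefix is copied into an aligned buffer and hashed separately, then the aligned remainder takes the fast path. The prefix-length arithmetic in isolation, assuming the required alignment is a power of two so the mask is alignment minus one:

    #include <stdint.h>
    #include <stdio.h>

    /* How many leading bytes must be peeled off before `data` is
     * aligned; mirrors the alignmask arithmetic in
     * shash_update_unaligned(). */
    static unsigned int unaligned_prefix(const void *data,
                                         unsigned long alignmask)
    {
        unsigned long off = (uintptr_t)data & alignmask;

        return off ? (unsigned int)(alignmask + 1 - off) : 0;
    }

    int main(void)
    {
        /* an address ending in ...3 needs 13 bytes to reach the next
         * 16-byte boundary (mask 15): prints 13 */
        printf("%u\n", unaligned_prefix((const void *)(uintptr_t)0x1003, 15));
        return 0;
    }
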
algif_hash.c
     31  unsigned int len;  (struct member)
     73  int len = min_t(unsigned long, seglen, limit);  in hash_sendmsg() (local)
     76  newlen = af_alg_make_sg(&ctx->sgl, from, len, 0);  in hash_sendmsg()
    161  struct msghdr *msg, size_t len, int flags)  in hash_recvmsg() (argument)
    169  if (len > ds)  in hash_recvmsg()
    170  len = ds;  in hash_recvmsg()
    171  else if (len < ds)  in hash_recvmsg()
    184  err = memcpy_toiovec(msg->msg_iov, ctx->result, len);  in hash_recvmsg()
    189  return err ?: len;  in hash_recvmsg()
    402  sock_kfree_s(sk, ctx, ctx->len);  in hash_sock_destruct()
    [all …]

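algif_hash.c is the kernel half of a userspace-visible API: hash_sendmsg() accepts data written to an AF_ALG socket and hash_recvmsg() returns the digest. A minimal userspace consumer, with error handling omitted for brevity (requires a kernel built with CONFIG_CRYPTO_USER_API_HASH):

    #include <stdio.h>
    #include <unistd.h>
    #include <sys/socket.h>
    #include <linux/if_alg.h>

    int main(void)
    {
        struct sockaddr_alg sa = {
            .salg_family = AF_ALG,
            .salg_type   = "hash",   /* served by algif_hash.c */
            .salg_name   = "sha1",
        };
        unsigned char digest[20];    /* SHA-1 digest size */
        int tfm, op, i;

        tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
        bind(tfm, (struct sockaddr *)&sa, sizeof(sa));
        op = accept(tfm, NULL, 0);

        send(op, "hello", 5, 0);            /* lands in hash_sendmsg() */
        read(op, digest, sizeof(digest));   /* served by hash_recvmsg() */

        for (i = 0; i < 20; i++)
            printf("%02x", digest[i]);
        printf("\n");

        close(op);
        close(tfm);
        return 0;
    }
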
algif_skcipher.c
     49  unsigned int len;  (struct member)
    298  unsigned long len = size;  in skcipher_sendmsg() (local)
    305  len = min_t(unsigned long, len,  in skcipher_sendmsg()
    310  msg->msg_iov, len);  in skcipher_sendmsg()
    314  sg->length += len;  in skcipher_sendmsg()
    318  ctx->used += len;  in skcipher_sendmsg()
    319  copied += len;  in skcipher_sendmsg()
    320  size -= len;  in skcipher_sendmsg()
    330  len = min_t(unsigned long, len, skcipher_sndbuf(sk));  in skcipher_sendmsg()
    340  plen = min_t(int, len, PAGE_SIZE);  in skcipher_sendmsg()
    [all …]

algboss.c
    101  unsigned int len;  in cryptomgr_schedule_probe() (local)
    114  len = p - name;  in cryptomgr_schedule_probe()
    115  if (!len || *p != '(')  in cryptomgr_schedule_probe()
    118  memcpy(param->template, name, len);  in cryptomgr_schedule_probe()
    125  len = 0;  in cryptomgr_schedule_probe()
    146  len = p - name;  in cryptomgr_schedule_probe()
    147  if (!len)  in cryptomgr_schedule_probe()
    154  memcpy(param->attrs[i].alg.data.name, name, len);  in cryptomgr_schedule_probe()

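The len manipulations in cryptomgr_schedule_probe() implement name parsing: an algorithm name such as "hmac(sha256)" is split at the first '(' into a template name and its arguments, and the argument list is then walked comma by comma. A minimal, hypothetical split_alg_name() showing just the first cut (the real parser also handles nested parentheses and multiple arguments):

    #include <stdio.h>
    #include <string.h>

    /* Hypothetical helper: split "template(inner)" into its two parts.
     * Returns 0 on success, -1 on a malformed name. */
    static int split_alg_name(const char *name, char *tmpl, size_t tlen,
                              char *inner, size_t ilen)
    {
        const char *p = strchr(name, '(');
        const char *e = strrchr(name, ')');
        size_t len;

        if (!p || p == name || !e || e < p)
            return -1;

        len = p - name;
        if (len >= tlen || (size_t)(e - p - 1) >= ilen)
            return -1;

        memcpy(tmpl, name, len);
        tmpl[len] = '\0';
        memcpy(inner, p + 1, e - p - 1);
        inner[e - p - 1] = '\0';
        return 0;
    }

    int main(void)
    {
        char tmpl[32], inner[32];

        if (!split_alg_name("hmac(sha256)", tmpl, sizeof(tmpl),
                            inner, sizeof(inner)))
            printf("template=%s inner=%s\n", tmpl, inner);
        return 0;
    }
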
sha1_generic.c
     40  unsigned int len)  in crypto_sha1_update() (argument)
     47  sctx->count += len;  in crypto_sha1_update()
     51  if ((partial + len) >= SHA1_BLOCK_SIZE) {  in crypto_sha1_update()
     65  } while (done + SHA1_BLOCK_SIZE <= len);  in crypto_sha1_update()
     70  memcpy(sctx->buffer + partial, src, len - done);  in crypto_sha1_update()

crct10dif_generic.c
     68  static int __chksum_finup(__u16 *crcp, const u8 *data, unsigned int len,  in __chksum_finup() (argument)
     71  *(__u16 *)out = crc_t10dif_generic(*crcp, data, len);  in __chksum_finup()
     76  unsigned int len, u8 *out)  in chksum_finup() (argument)
     80  return __chksum_finup(&ctx->crc, data, len, out);  in chksum_finup()

tgr192.c
    515  unsigned int len)  in tgr192_update() (argument)
    528  for (; len && tctx->count < 64; len--) {  in tgr192_update()
    532  if (!len) {  in tgr192_update()
    538  while (len >= 64) {  in tgr192_update()
    542  len -= 64;  in tgr192_update()
    545  for (; len && tctx->count < 64; len--) {  in tgr192_update()

md4.c
    169  static int md4_update(struct shash_desc *desc, const u8 *data, unsigned int len)  in md4_update() (argument)
    174  mctx->byte_count += len;  in md4_update()
    176  if (avail > len) {  in md4_update()
    178  data, len);  in md4_update()
    187  len -= avail;  in md4_update()
    189  while (len >= sizeof(mctx->block)) {  in md4_update()
    193  len -= sizeof(mctx->block);  in md4_update()
    196  memcpy(mctx->block, data, len);  in md4_update()

crc32c_generic.c
    106  static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)  in __chksum_finup() (argument)
    108  *(__le32 *)out = ~cpu_to_le32(__crc32c_le(*crcp, data, len));  in __chksum_finup()
    113  unsigned int len, u8 *out)  in chksum_finup() (argument)
    117  return __chksum_finup(&ctx->crc, data, len, out);  in chksum_finup()

vmac.c
    328  static u64 l3hash(u64 p1, u64 p2, u64 k1, u64 k2, u64 len)  in l3hash() (argument)
    335  ADD128(p1, p2, len, t);  in l3hash()
    563  unsigned int len)  in vmac_update() (argument)
    573  min = len < expand ? len : expand;  in vmac_update()
    578  if (len < expand)  in vmac_update()
    584  len -= expand;  in vmac_update()
    587  if (len % VMAC_NHBYTES) {  in vmac_update()
    588  memcpy(ctx->partial, p + len - (len % VMAC_NHBYTES),  in vmac_update()
    589  len % VMAC_NHBYTES);  in vmac_update()
    590  ctx->partial_size = len % VMAC_NHBYTES;  in vmac_update()
    [all …]

crct10dif_common.c
     70  __u16 crc_t10dif_generic(__u16 crc, const unsigned char *buffer, size_t len)  in crc_t10dif_generic() (argument)
     74  for (i = 0 ; i < len ; i++)  in crc_t10dif_generic()

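The loop at line 74 steps a table-driven CRC once per input byte. crc_t10dif_generic() computes the 16-bit CRC used by T10 DIF: polynomial 0x8bb3, zero initial value, no bit reflection. The same function computed bit by bit, as a table-free sketch:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Table-free CRC-T10DIF: MSB-first, polynomial 0x8bb3, init 0.
     * crc_t10dif_generic() computes the same function via a 256-entry
     * lookup table, one table step per input byte. */
    static uint16_t crc_t10dif_bitwise(uint16_t crc, const unsigned char *buf,
                                       size_t len)
    {
        for (size_t i = 0; i < len; i++) {
            crc ^= (uint16_t)(buf[i] << 8);   /* feed next byte, MSB first */
            for (int j = 0; j < 8; j++)
                crc = (crc & 0x8000) ? (uint16_t)((crc << 1) ^ 0x8bb3)
                                     : (uint16_t)(crc << 1);
        }
        return crc;
    }

    int main(void)
    {
        const char *check = "123456789";

        /* the standard check value for CRC-16/T10-DIF is 0xd0db */
        printf("%#06x\n", (unsigned)crc_t10dif_bitwise(0,
               (const unsigned char *)check, strlen(check)));
        return 0;
    }
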
eseqiv.c
     82  unsigned int len;  in eseqiv_givencrypt() (local)
    130  len = ivsize;  in eseqiv_givencrypt()
    133  len = sizeof(u64);  in eseqiv_givencrypt()
    136  memcpy(req->giv + ivsize - len, &seq, len);  in eseqiv_givencrypt()

sha512_generic.c
    168  unsigned int len)  in crypto_sha512_update() (argument)
    178  if ((sctx->count[0] += len) < len)  in crypto_sha512_update()
    184  if (len >= part_len) {  in crypto_sha512_update()
    188  for (i = part_len; i + 127 < len; i+=128)  in crypto_sha512_update()
    197  memcpy(&sctx->buf[index], &data[i], len - i);  in crypto_sha512_update()

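Line 178 is worth a note: sha512 keeps a 128-bit byte counter in two 64-bit halves, and `(sctx->count[0] += len) < len` is the standard unsigned-overflow test, since the sum wraps below the addend exactly when there is a carry out. In isolation:

    #include <stdint.h>
    #include <stdio.h>

    /* The carry idiom from crypto_sha512_update(): a 128-bit counter
     * in two u64 halves. Unsigned addition wraps, so after the add,
     * "sum < addend" is exactly the carry-out condition. */
    static void count128_add(uint64_t count[2], uint64_t len)
    {
        if ((count[0] += len) < len)
            count[1]++;
    }

    int main(void)
    {
        uint64_t c[2] = { UINT64_MAX - 1, 0 };

        count128_add(c, 5);   /* wraps the low half */
        printf("%llu %llu\n",
               (unsigned long long)c[1], (unsigned long long)c[0]);
        /* prints: 1 3 */
        return 0;
    }
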
/crypto/asymmetric_keys/
asymmetric_type.c
     46  kid->len = len_1 + len_2;  in asymmetric_key_generate_id()
     62  if (kid1->len != kid2->len)  in asymmetric_key_id_same()
     64  return memcmp(kid1->data, kid2->data, kid1->len) == 0;  in asymmetric_key_id_same()
     78  if (kid1->len < kid2->len)  in asymmetric_key_id_partial()
     80  return memcmp(kid1->data + (kid1->len - kid2->len),  in asymmetric_key_id_partial()
     81  kid2->data, kid2->len) == 0;  in asymmetric_key_id_partial()
    112  match_id->len = hexlen;  in __asymmetric_key_hex_to_key_id()
    247  n = kid->len;  in asymmetric_key_describe()

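asymmetric_key_id_partial() (lines 78-81) declares a match when the shorter ID equals the tail of the longer one, which is what lets a truncated hex fragment name a key by its trailing bytes. The comparison in isolation:

    #include <stdio.h>
    #include <string.h>

    /* Sketch of asymmetric_key_id_partial(): a partial key ID matches
     * if it equals the *tail* of the full ID. */
    struct key_id {
        const unsigned char *data;
        size_t len;
    };

    static int key_id_partial(const struct key_id *full,
                              const struct key_id *part)
    {
        if (full->len < part->len)
            return 0;
        return memcmp(full->data + (full->len - part->len),
                      part->data, part->len) == 0;
    }

    int main(void)
    {
        const unsigned char id[]   = { 0xde, 0xad, 0xbe, 0xef };
        const unsigned char tail[] = { 0xbe, 0xef };
        struct key_id full = { id, sizeof(id) };
        struct key_id part = { tail, sizeof(tail) };

        printf("%s\n", key_id_partial(&full, &part) ? "match" : "no match");
        return 0;
    }
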
pkcs7_verify.c
    162  sinfo->signing_cert_id->len, sinfo->signing_cert_id->data);  in pkcs7_find_key()
    192  x509->authority->len, x509->authority->data);  in pkcs7_verify_sig_chain()
    219  x509->authority->len, x509->authority->data);  in pkcs7_verify_sig_chain()
    224  p->index, p->skid->len, p->skid->data);  in pkcs7_verify_sig_chain()
    342  n, x509->authority->len, x509->authority->data);  in pkcs7_verify()