Identifier search results for dst_len under drivers/, grouped by file. Each hit lists the source line, a snippet, and the enclosing function.

/drivers/base/regmap/regcache-lzo.c
    22: size_t dst_len;   (member)
    49: if (ret != LZO_E_OK || compress_size > lzo_ctx->dst_len)   in regcache_lzo_compress()
    51: lzo_ctx->dst_len = compress_size;   in regcache_lzo_compress()
    57: size_t dst_len;   in regcache_lzo_decompress()   (local)
    60: dst_len = lzo_ctx->dst_len;   in regcache_lzo_decompress()
    62: lzo_ctx->dst, &dst_len);   in regcache_lzo_decompress()
    63: if (ret != LZO_E_OK || dst_len != lzo_ctx->dst_len)   in regcache_lzo_decompress()
    73: lzo_ctx->dst_len = lzo1x_worst_compress(PAGE_SIZE);   in regcache_lzo_compress_cache_block()
    74: lzo_ctx->dst = kmalloc(lzo_ctx->dst_len, GFP_KERNEL);   in regcache_lzo_compress_cache_block()
    76: lzo_ctx->dst_len = 0;   in regcache_lzo_compress_cache_block()
    [all …]
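The regcache-lzo hits trace the usual in-kernel LZO round trip: size the output buffer with lzo1x_worst_compress(), compress with lzo1x_1_compress(), which reports the produced size through its dst_len pointer, and record that size only if it fits. A minimal sketch of the pattern follows; the context struct and helper names are illustrative, not the driver's actual code.

```c
#include <linux/errno.h>
#include <linux/lzo.h>
#include <linux/mm.h>
#include <linux/slab.h>

/* Illustrative context, loosely modelled on the snippets above. */
struct lzo_example_ctx {
	void *wmem;	/* LZO1X_MEM_COMPRESS bytes of scratch memory */
	void *dst;	/* compressed output buffer */
	size_t dst_len;	/* capacity going in, produced length coming out */
};

static int lzo_example_alloc(struct lzo_example_ctx *ctx)
{
	/* Worst case: LZO can grow incompressible input slightly. */
	ctx->dst_len = lzo1x_worst_compress(PAGE_SIZE);
	ctx->dst = kmalloc(ctx->dst_len, GFP_KERNEL);
	if (!ctx->dst) {
		ctx->dst_len = 0;
		return -ENOMEM;
	}
	return 0;
}

static int lzo_example_compress(struct lzo_example_ctx *ctx,
				const void *src, size_t src_len)
{
	size_t compress_size;
	int ret;

	ret = lzo1x_1_compress(src, src_len, ctx->dst, &compress_size,
			       ctx->wmem);
	if (ret != LZO_E_OK || compress_size > ctx->dst_len)
		return -EINVAL;

	ctx->dst_len = compress_size;	/* remember how much was produced */
	return 0;
}
```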
/drivers/gpu/drm/drm_format_helper.c
    175: size_t dst_len = linepixels * sizeof(u16);   in drm_fb_xrgb8888_to_rgb565()   (local)
    192: dst += dst_len;   in drm_fb_xrgb8888_to_rgb565()
    219: size_t dst_len = linepixels * sizeof(u16);   in drm_fb_xrgb8888_to_rgb565_dstclip()   (local)
    223: dbuf = kmalloc(dst_len, GFP_KERNEL);   in drm_fb_xrgb8888_to_rgb565_dstclip()
    231: memcpy_toio(dst, dbuf, dst_len);   in drm_fb_xrgb8888_to_rgb565_dstclip()
    233: dst += dst_len;   in drm_fb_xrgb8888_to_rgb565_dstclip()
    271: size_t dst_len = linepixels * 3;   in drm_fb_xrgb8888_to_rgb888_dstclip()   (local)
    275: dbuf = kmalloc(dst_len, GFP_KERNEL);   in drm_fb_xrgb8888_to_rgb888_dstclip()
    283: memcpy_toio(dst, dbuf, dst_len);   in drm_fb_xrgb8888_to_rgb888_dstclip()
    285: dst += dst_len;   in drm_fb_xrgb8888_to_rgb888_dstclip()
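The *_dstclip() hits show a bounce-buffer idiom: each scanline is converted into a kmalloc'd buffer in system memory and then pushed to the (possibly I/O-mapped) destination with a single memcpy_toio() of dst_len bytes. A hedged sketch of that loop, with the function name and the pixel-conversion callback as assumptions for illustration:

```c
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/slab.h>

/*
 * Convert lines into a system-memory bounce buffer, then copy each finished
 * line to I/O memory in one burst.  line_convert() stands in for whatever
 * per-pixel conversion (e.g. XRGB8888 -> RGB565) is being performed.
 */
static int example_lines_to_iomem(void __iomem *dst, const u32 *src,
				  unsigned int linepixels, unsigned int lines,
				  void (*line_convert)(u16 *dbuf, const u32 *sbuf,
						       unsigned int pixels))
{
	size_t dst_len = linepixels * sizeof(u16);	/* one output line */
	unsigned int y;
	u16 *dbuf;

	dbuf = kmalloc(dst_len, GFP_KERNEL);
	if (!dbuf)
		return -ENOMEM;

	for (y = 0; y < lines; y++) {
		line_convert(dbuf, src, linepixels);	/* convert in RAM */
		memcpy_toio(dst, dbuf, dst_len);	/* push to __iomem */
		src += linepixels;
		dst += dst_len;
	}

	kfree(dbuf);
	return 0;
}
```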
/drivers/block/zram/zcomp.c
    116: const void *src, unsigned int *dst_len)   in zcomp_compress()   (argument)
    132: *dst_len = PAGE_SIZE * 2;   in zcomp_compress()
    136: zstrm->buffer, dst_len);   in zcomp_compress()
    142: unsigned int dst_len = PAGE_SIZE;   in zcomp_decompress()   (local)
    146: dst, &dst_len);   in zcomp_decompress()
/drivers/block/zram/zcomp.h
    37: const void *src, unsigned int *dst_len);
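In zcomp, dst_len is an in/out parameter: zcomp_compress() hands the backend a scratch buffer sized at two pages and gets the compressed length back through the same pointer, while zcomp_decompress() always expects exactly one page out. A rough sketch of that contract using the kernel's crypto_comp API, which is what the snippet at line 136 appears to be calling; the helper names below are made up for illustration:

```c
#include <linux/crypto.h>
#include <linux/mm.h>

/*
 * Sketch of the in/out dst_len contract around crypto_comp_{de}compress().
 * zram's real helpers carry a zcomp_strm with a per-CPU tfm and buffer.
 */
static int example_compress_page(struct crypto_comp *tfm, const void *src,
				 void *buffer, unsigned int *dst_len)
{
	/* capacity in: the scratch buffer is sized at two pages */
	*dst_len = PAGE_SIZE * 2;

	/* produced length comes back out through *dst_len */
	return crypto_comp_compress(tfm, src, PAGE_SIZE, buffer, dst_len);
}

static int example_decompress_page(struct crypto_comp *tfm, const void *src,
				   unsigned int src_len, void *dst)
{
	unsigned int dst_len = PAGE_SIZE;	/* exactly one page expected */

	return crypto_comp_decompress(tfm, src, src_len, dst, &dst_len);
}
```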
/drivers/crypto/virtio/virtio_crypto_algs.c
    340: u64 dst_len;   in __virtio_crypto_skcipher_do_req()   (local)
    390: dst_len = virtio_crypto_alg_sg_nents_length(req->dst);   in __virtio_crypto_skcipher_do_req()
    391: if (unlikely(dst_len > U32_MAX)) {   in __virtio_crypto_skcipher_do_req()
    397: dst_len = min_t(unsigned int, req->cryptlen, dst_len);   in __virtio_crypto_skcipher_do_req()
    399: req->cryptlen, dst_len);   in __virtio_crypto_skcipher_do_req()
    401: if (unlikely(req->cryptlen + dst_len + ivsize +   in __virtio_crypto_skcipher_do_req()
    409: cpu_to_le32((uint32_t)dst_len);   in __virtio_crypto_skcipher_do_req()
/drivers/crypto/virtio/virtio_crypto_akcipher_algs.c
    99: vc_akcipher_req->dst_buf, akcipher_req->dst_len);   in virtio_crypto_dataq_akcipher_callback()
    231: unsigned int src_len = verify ? req->src_len + req->dst_len : req->src_len;   in __virtio_crypto_akcipher_do_req()
    253: dst_buf = kcalloc_node(req->dst_len, 1, GFP_KERNEL, node);   in __virtio_crypto_akcipher_do_req()
    257: sg_init_one(&dstdata_sg, dst_buf, req->dst_len);   in __virtio_crypto_akcipher_do_req()
    311: akcipher_req->para.dst_data_len = cpu_to_le32(req->dst_len);   in virtio_crypto_rsa_do_req()
/drivers/crypto/qat/qat_common/qat_asym_algs.c
    159: areq->dst_len = req->ctx.dh->p_size;   in qat_dh_cb()
    162: areq->dst_len, 1);   in qat_dh_cb()
    227: if (req->dst_len < ctx->p_size) {   in qat_dh_compute_value()
    228: req->dst_len = ctx->p_size;   in qat_dh_compute_value()
    308: if (sg_is_last(req->dst) && req->dst_len == ctx->p_size) {   in qat_dh_compute_value()
    311: req->dst_len,   in qat_dh_compute_value()
    525: areq->dst_len = req->ctx.rsa->key_sz;   in qat_rsa_cb()
    528: areq->dst_len, 1);   in qat_rsa_cb()
    655: if (req->dst_len < ctx->key_sz) {   in qat_rsa_enc()
    656: req->dst_len = ctx->key_sz;   in qat_rsa_enc()
    [all …]
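The qat entries (and the hpre and caam ones further down) follow the akcipher convention for dst_len: if the caller's buffer is smaller than the key or modulus size, the driver writes the required size back into req->dst_len and fails with -EOVERFLOW, and on completion the callback reports the actual result length through the same field. A minimal sketch of that check; the helper name and the key_sz parameter are illustrative:

```c
#include <crypto/akcipher.h>
#include <linux/errno.h>

/*
 * dst_len is both a capacity (on submission) and a result length (on
 * completion).  Rejecting a short buffer with -EOVERFLOW while reporting
 * the needed size lets the caller reallocate and retry.
 */
static int example_check_dst_len(struct akcipher_request *req,
				 unsigned int key_sz)
{
	if (req->dst_len < key_sz) {
		req->dst_len = key_sz;	/* tell the caller what is needed */
		return -EOVERFLOW;
	}

	return 0;
}
```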
/drivers/crypto/ccp/ccp-dmaengine.c
    368: unsigned int dst_offset, dst_len;   in ccp_create_desc()   (local)
    388: dst_len = sg_dma_len(dst_sg);   in ccp_create_desc()
    406: if (!dst_len) {   in ccp_create_desc()
    415: dst_len = sg_dma_len(dst_sg);   in ccp_create_desc()
    420: len = min(dst_len, src_len);   in ccp_create_desc()
    453: dst_len -= len;   in ccp_create_desc()
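ccp_create_desc() walks the source and destination scatterlists in lockstep, sizing each chunk by min(dst_len, src_len) and refilling either length from the next sg entry once it reaches zero. A simplified sketch of that loop; emit_chunk() and the function name are stand-ins, and error handling is reduced to the essentials:

```c
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

/*
 * Walk two DMA-mapped scatterlists in lockstep, emitting chunks no larger
 * than the space left in either entry.  emit_chunk() stands in for building
 * one DMA descriptor.
 */
static int example_walk_sg_pair(struct scatterlist *src_sg,
				struct scatterlist *dst_sg,
				unsigned int total,
				void (*emit_chunk)(dma_addr_t src, dma_addr_t dst,
						   unsigned int len))
{
	unsigned int src_len = sg_dma_len(src_sg);
	unsigned int dst_len = sg_dma_len(dst_sg);
	unsigned int src_off = 0, dst_off = 0;

	while (total) {
		unsigned int len;

		if (!src_len) {			/* source entry exhausted */
			src_sg = sg_next(src_sg);
			if (!src_sg)
				return -EINVAL;
			src_len = sg_dma_len(src_sg);
			src_off = 0;
		}
		if (!dst_len) {			/* destination entry exhausted */
			dst_sg = sg_next(dst_sg);
			if (!dst_sg)
				return -EINVAL;
			dst_len = sg_dma_len(dst_sg);
			dst_off = 0;
		}

		len = min3(dst_len, src_len, total);
		emit_chunk(sg_dma_address(src_sg) + src_off,
			   sg_dma_address(dst_sg) + dst_off, len);

		src_len -= len;
		dst_len -= len;
		src_off += len;
		dst_off += len;
		total -= len;
	}

	return 0;
}
```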
/drivers/crypto/ccp/ccp-crypto-rsa.c
    52: req->dst_len = rctx->cmd.u.rsa.key_size >> 3;   in ccp_rsa_complete()
/drivers/crypto/hisilicon/hpre/hpre_crypto.c
    377: areq->dst_len = ctx->key_sz;   in hpre_dh_cb()
    403: areq->dst_len = ctx->key_sz;   in hpre_rsa_cb()
    449: if (akreq->dst_len < ctx->key_sz) {   in hpre_msg_request_set()
    450: akreq->dst_len = ctx->key_sz;   in hpre_msg_request_set()
    463: if (kreq->dst_len < ctx->key_sz) {   in hpre_msg_request_set()
    464: kreq->dst_len = ctx->key_sz;   in hpre_msg_request_set()
    535: ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1);   in hpre_dh_compute_value()
    748: ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);   in hpre_rsa_enc()
    803: ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);   in hpre_rsa_dec()
/drivers/media/usb/msi2500/msi2500.c
    245: unsigned int i, j, transactions, dst_len = 0;   in msi2500_convert_stream()   (local)
    281: dst_len += 1008;   in msi2500_convert_stream()
    303: dst_len += 1008;   in msi2500_convert_stream()
    313: dst_len += 984;   in msi2500_convert_stream()
    320: dst_len += 1008;   in msi2500_convert_stream()
    327: dst_len += 1008;   in msi2500_convert_stream()
    334: dst_len += 1008;   in msi2500_convert_stream()
    356: return dst_len;   in msi2500_convert_stream()
/drivers/net/netdevsim/fib.c
    239: fen_info->dst_len, AF_INET, fen_info->tb_id);   in nsim_fib4_rt_create()
    263: fen_info->dst_len, AF_INET,   in nsim_fib4_rt_lookup()
    276: int dst_len = fib4_rt->common.key.prefix_len;   in nsim_fib4_rt_hw_flags_set()   (local)
    282: fri.dst_len = dst_len;   in nsim_fib4_rt_hw_flags_set()
/drivers/crypto/caam/caampkc.c
    285: dst_nents = sg_nents_for_len(req->dst, req->dst_len);   in rsa_edesc_alloc()
    329: sg_to_sec4_sg_last(req->dst, req->dst_len,   in rsa_edesc_alloc()
    717: if (req->dst_len < key->n_sz) {   in caam_rsa_enc()
    718: req->dst_len = key->n_sz;   in caam_rsa_enc()
    841: if (req->dst_len < key->n_sz) {   in caam_rsa_dec()
    842: req->dst_len = key->n_sz;   in caam_rsa_dec()
/drivers/crypto/caam/caamhash.c
    544: struct ahash_request *req, int dst_len)   in ahash_unmap()   (argument)
    564: struct ahash_request *req, int dst_len, u32 flag)   in ahash_unmap_ctx()   (argument)
    572: ahash_unmap(dev, edesc, req, dst_len);   in ahash_unmap_ctx()
    782: int dst_len, enum dma_data_direction dir)   in ahash_enqueue_req()   (argument)
    804: ahash_unmap_ctx(jrdev, edesc, req, dst_len, dir);   in ahash_enqueue_req()
/drivers/crypto/caam/caamalg_qi.c
    947: int src_len, dst_len = 0;   in aead_edesc_alloc()   (local)
    989: dst_len = src_len + (encrypt ? authsize : (-authsize));   in aead_edesc_alloc()
    999: dst_nents = sg_nents_for_len(req->dst, dst_len);   in aead_edesc_alloc()
    1002: dst_len);   in aead_edesc_alloc()
    1115: sg_to_qm_sg_last(req->dst, dst_len, sg_table + qm_sg_index, 0);   in aead_edesc_alloc()
/drivers/crypto/caam/caamalg.c
    1300: int src_len, dst_len = 0;   in aead_edesc_alloc()   (local)
    1307: dst_len = src_len + (encrypt ? authsize : (-authsize));   in aead_edesc_alloc()
    1316: dst_nents = sg_nents_for_len(req->dst, dst_len);   in aead_edesc_alloc()
    1319: dst_len);   in aead_edesc_alloc()
    1409: sg_to_sec4_sg_last(req->dst, dst_len,   in aead_edesc_alloc()
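The two caam AEAD allocators above size the destination the same way: the output covers the associated data plus the payload, grown by the authentication tag on encryption and shrunk by it on decryption (the same idea appears in talitos and chcr below, with per-driver variations). A plain statement of that arithmetic; the function name is illustrative:

```c
#include <linux/types.h>

/* AEAD destination sizing: ciphertext carries the ICV, plaintext does not. */
static unsigned int example_aead_dst_len(unsigned int assoclen,
					 unsigned int cryptlen,
					 unsigned int authsize, bool encrypt)
{
	unsigned int src_len = assoclen + cryptlen;

	return encrypt ? src_len + authsize : src_len - authsize;
}
```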
/drivers/crypto/atmel-ecc.c
    62: n_sz = min_t(size_t, ctx->n_sz, req->dst_len);   in atmel_ecdh_done()
    166: nbytes = min_t(size_t, ATMEL_ECC_PUBKEY_SIZE, req->dst_len);   in atmel_ecdh_generate_public_key()
/drivers/crypto/talitos.c
    1331: int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;   in talitos_edesc_alloc()   (local)
    1353: dst_len = 0;   in talitos_edesc_alloc()
    1362: dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);   in talitos_edesc_alloc()
    1363: dst_nents = sg_nents_for_len(dst, dst_len);   in talitos_edesc_alloc()
    1380: (dst_nents ? dst_len : 0) + authsize;   in talitos_edesc_alloc()
/drivers/media/usb/airspy/airspy.c
    234: unsigned int dst_len;   in airspy_convert_stream()   (local)
    238: dst_len = src_len;   in airspy_convert_stream()
    240: dst_len = 0;   in airspy_convert_stream()
    260: return dst_len;   in airspy_convert_stream()
/drivers/video/fbdev/omap2/omapfb/dss/omapdss-boot-init.c
    60: static void __init omapdss_prefix_strcpy(char *dst, int dst_len,   in omapdss_prefix_strcpy()   (argument)
/drivers/gpu/drm/omapdrm/dss/omapdss-boot-init.c
    60: static void __init omapdss_prefix_strcpy(char *dst, int dst_len,   in omapdss_prefix_strcpy()   (argument)
/drivers/media/dvb-frontends/rtl2832_sdr.c
    175: unsigned int dst_len;   in rtl2832_sdr_convert_stream()   (local)
    180: dst_len = src_len;   in rtl2832_sdr_convert_stream()
    188: dst_len = 2 * src_len;   in rtl2832_sdr_convert_stream()
    190: dst_len = 0;   in rtl2832_sdr_convert_stream()
    210: return dst_len;   in rtl2832_sdr_convert_stream()
/drivers/net/ethernet/intel/ice/ice_flex_pipe.c
    2707: u32 dst_len, sect_len, offset = 0;   in ice_fill_tbl()   (local)
    2742: dst_len = hw->blk[block_id].xlt1.count *   in ice_fill_tbl()
    2755: dst_len = hw->blk[block_id].xlt2.count *   in ice_fill_tbl()
    2768: dst_len = hw->blk[block_id].prof.count *   in ice_fill_tbl()
    2781: dst_len = hw->blk[block_id].prof_redir.count *   in ice_fill_tbl()
    2795: dst_len = (u32)(hw->blk[block_id].es.count *   in ice_fill_tbl()
    2806: if (offset > dst_len)   in ice_fill_tbl()
    2814: if ((offset + sect_len) > dst_len)   in ice_fill_tbl()
    2815: sect_len = dst_len - offset;   in ice_fill_tbl()
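In ice_fill_tbl(), dst_len is the byte size of the destination table for the current block: sections that start past the end are skipped, and a section that would run over the end is clipped before copying. Reduced to its core, with illustrative names:

```c
#include <linux/string.h>
#include <linux/types.h>

/* Copy one package section into a dst_len-byte table, clipping overruns. */
static void example_fill_tbl(u8 *dst, u32 dst_len,
			     const u8 *sect, u32 sect_len, u32 offset)
{
	if (offset > dst_len)
		return;				/* section starts past the table */

	if (offset + sect_len > dst_len)
		sect_len = dst_len - offset;	/* copy only what fits */

	memcpy(dst + offset, sect, sect_len);
}
```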
/drivers/crypto/chelsio/chcr_algo.c
    2588: int src_len, dst_len;   in chcr_aead_dma_map()   (local)
    2596: dst_len = src_len;   in chcr_aead_dma_map()
    2599: dst_len = req->assoclen + req->cryptlen + (op_type ?   in chcr_aead_dma_map()
    2603: if (!req->cryptlen || !src_len || !dst_len)   in chcr_aead_dma_map()
    2626: sg_nents_for_len(req->dst, dst_len),   in chcr_aead_dma_map()
    2649: int src_len, dst_len;   in chcr_aead_dma_unmap()   (local)
    2657: dst_len = src_len;   in chcr_aead_dma_unmap()
    2660: dst_len = req->assoclen + req->cryptlen + (op_type ?   in chcr_aead_dma_unmap()
    2664: if (!req->cryptlen || !src_len || !dst_len)   in chcr_aead_dma_unmap()
    2678: sg_nents_for_len(req->dst, dst_len),   in chcr_aead_dma_unmap()
/drivers/net/ethernet/rocker/rocker_ofdpa.c
    2268: int dst_len, struct fib_info *fi, u32 tb_id,   in ofdpa_port_fib_ipv4()   (argument)
    2273: __be32 dst_mask = inet_make_mask(dst_len);   in ofdpa_port_fib_ipv4()
    2747: fen_info->dst_len, fen_info->fi,   in ofdpa_fib4_add()
    2770: fen_info->dst_len, fen_info->fi,   in ofdpa_fib4_del()
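In the netdevsim and rocker entries, dst_len is not a byte count at all but an IPv4 route prefix length (0 to 32); rocker converts it into a netmask with inet_make_mask(). For orientation, a small illustration of how such a prefix length is typically used; the helper below is not driver code:

```c
#include <net/route.h>

/* dst_len here is a route prefix length, e.g. 24 for a /24. */
static bool example_route_matches(__be32 addr, __be32 dst, int dst_len)
{
	__be32 mask = inet_make_mask(dst_len);	/* 24 -> htonl(0xffffff00) */

	return (addr & mask) == (dst & mask);
}
```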