/drivers/base/regmap/ |
D | regcache-lzo.c |
      22  size_t dst_len;   member
      49  if (ret != LZO_E_OK || compress_size > lzo_ctx->dst_len)   in regcache_lzo_compress()
      51  lzo_ctx->dst_len = compress_size;   in regcache_lzo_compress()
      57  size_t dst_len;   in regcache_lzo_decompress()   local
      60  dst_len = lzo_ctx->dst_len;   in regcache_lzo_decompress()
      62  lzo_ctx->dst, &dst_len);   in regcache_lzo_decompress()
      63  if (ret != LZO_E_OK || dst_len != lzo_ctx->dst_len)   in regcache_lzo_decompress()
      73  lzo_ctx->dst_len = lzo1x_worst_compress(PAGE_SIZE);   in regcache_lzo_compress_cache_block()
      74  lzo_ctx->dst = kmalloc(lzo_ctx->dst_len, GFP_KERNEL);   in regcache_lzo_compress_cache_block()
      76  lzo_ctx->dst_len = 0;   in regcache_lzo_compress_cache_block()
      [all …]
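The regcache-lzo hits above follow the usual LZO in/out length convention: dst_len holds the capacity of the destination buffer (sized with lzo1x_worst_compress()) going in and the produced size coming out. A minimal sketch of that pattern, assuming a hypothetical example_lzo_pack() helper and a caller-provided work area of LZO1X_MEM_COMPRESS bytes:

#include <linux/errno.h>
#include <linux/lzo.h>

/*
 * Hypothetical helper (not the regmap code itself): *dst_len is the
 * capacity of dst on entry and the number of bytes actually produced
 * on return.  dst must hold at least lzo1x_worst_compress(src_len)
 * bytes and wrkmem at least LZO1X_MEM_COMPRESS bytes.
 */
static int example_lzo_pack(const unsigned char *src, size_t src_len,
			    unsigned char *dst, size_t *dst_len,
			    void *wrkmem)
{
	size_t compressed = 0;
	int ret;

	ret = lzo1x_1_compress(src, src_len, dst, &compressed, wrkmem);
	if (ret != LZO_E_OK || compressed > *dst_len)
		return -EINVAL;

	*dst_len = compressed;	/* report what was actually written */
	return 0;
}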
|
/drivers/gpu/drm/ |
D | drm_format_helper.c |
      176  size_t dst_len = linepixels * sizeof(u16);   in drm_fb_xrgb8888_to_rgb565()   local
      193  dst += dst_len;   in drm_fb_xrgb8888_to_rgb565()
      220  size_t dst_len = linepixels * sizeof(u16);   in drm_fb_xrgb8888_to_rgb565_dstclip()   local
      224  dbuf = kmalloc(dst_len, GFP_KERNEL);   in drm_fb_xrgb8888_to_rgb565_dstclip()
      232  memcpy_toio(dst, dbuf, dst_len);   in drm_fb_xrgb8888_to_rgb565_dstclip()
      234  dst += dst_len;   in drm_fb_xrgb8888_to_rgb565_dstclip()
      272  size_t dst_len = linepixels * 3;   in drm_fb_xrgb8888_to_rgb888_dstclip()   local
      276  dbuf = kmalloc(dst_len, GFP_KERNEL);   in drm_fb_xrgb8888_to_rgb888_dstclip()
      284  memcpy_toio(dst, dbuf, dst_len);   in drm_fb_xrgb8888_to_rgb888_dstclip()
      286  dst += dst_len;   in drm_fb_xrgb8888_to_rgb888_dstclip()
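In drm_format_helper.c, dst_len is the byte length of one converted scanline: linepixels * sizeof(u16) for RGB565 (or linepixels * 3 for RGB888), which the dstclip variants kmalloc() as a bounce buffer and memcpy_toio() to the device. A sketch of the per-line conversion that arithmetic assumes (hypothetical helper, no byte swapping):

#include <linux/types.h>

/*
 * Hypothetical per-line helper: each 32-bit XRGB8888 pixel becomes one
 * 16-bit RGB565 pixel, so a converted line occupies
 * dst_len = linepixels * sizeof(u16) bytes.
 */
static void example_xrgb8888_line_to_rgb565(u16 *dst, const u32 *src,
					    unsigned int linepixels)
{
	unsigned int x;

	for (x = 0; x < linepixels; x++) {
		u32 pix = src[x];

		dst[x] = ((pix & 0x00f80000) >> 8) |	/* top 5 bits of R */
			 ((pix & 0x0000fc00) >> 5) |	/* top 6 bits of G */
			 ((pix & 0x000000f8) >> 3);	/* top 5 bits of B */
	}
}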
|
/drivers/block/zram/ |
D | zcomp.c |
      118  const void *src, unsigned int *dst_len)   in zcomp_compress()   argument
      134  *dst_len = PAGE_SIZE * 2;   in zcomp_compress()
      138  zstrm->buffer, dst_len);   in zcomp_compress()
      144  unsigned int dst_len = PAGE_SIZE;   in zcomp_decompress()   local
      148  dst, &dst_len);   in zcomp_decompress()
|
D | zcomp.h | 37 const void *src, unsigned int *dst_len);
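For zram, dst_len is the in/out length argument of crypto_comp_compress(): zcomp_compress() passes in a two-page capacity so that even incompressible data completes, then reads back the compressed size, while zcomp_decompress() passes in PAGE_SIZE. A minimal sketch of the compress side, assuming a hypothetical example_compress_page() wrapper:

#include <linux/crypto.h>
#include <linux/mm.h>

/*
 * Hypothetical wrapper around the call pattern in zcomp_compress():
 * dst_len carries the destination capacity in and the compressed size
 * out.  buf_out must be at least 2 * PAGE_SIZE bytes so the call still
 * succeeds for incompressible data.
 */
static int example_compress_page(struct crypto_comp *tfm,
				 const u8 *page_in, u8 *buf_out,
				 unsigned int *dst_len)
{
	*dst_len = PAGE_SIZE * 2;	/* capacity of buf_out */

	return crypto_comp_compress(tfm, page_in, PAGE_SIZE,
				    buf_out, dst_len);
}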
|
/drivers/net/ethernet/mellanox/mlx5/core/ |
D | lag_mp.c |
      103  static void mlx5_lag_fib_set(struct lag_mp *mp, struct fib_info *fi, u32 dst, int dst_len)   in mlx5_lag_fib_set()   argument
      108  mp->fib.dst_len = dst_len;   in mlx5_lag_fib_set()
      139  (mp->fib.dst != fen_info->dst || mp->fib.dst_len != fen_info->dst_len) &&   in mlx5_lag_fib_route_event()
      156  mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len);   in mlx5_lag_fib_route_event()
      186  mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len);   in mlx5_lag_fib_route_event()
|
D | lag_mp.h | 22 int dst_len; member
|
/drivers/crypto/virtio/ |
D | virtio_crypto_algs.c |
      353  u64 dst_len;   in __virtio_crypto_skcipher_do_req()   local
      403  dst_len = virtio_crypto_alg_sg_nents_length(req->dst);   in __virtio_crypto_skcipher_do_req()
      404  if (unlikely(dst_len > U32_MAX)) {   in __virtio_crypto_skcipher_do_req()
      410  dst_len = min_t(unsigned int, req->cryptlen, dst_len);   in __virtio_crypto_skcipher_do_req()
      412  req->cryptlen, dst_len);   in __virtio_crypto_skcipher_do_req()
      414  if (unlikely(req->cryptlen + dst_len + ivsize +   in __virtio_crypto_skcipher_do_req()
      422  cpu_to_le32((uint32_t)dst_len);   in __virtio_crypto_skcipher_do_req()
|
/drivers/crypto/qat/qat_common/ |
D | qat_asym_algs.c |
      172  areq->dst_len = req->ctx.dh->p_size;   in qat_dh_cb()
      177  areq->dst_len, 1);   in qat_dh_cb()
      234  if (req->dst_len < ctx->p_size) {   in qat_dh_compute_value()
      235  req->dst_len = ctx->p_size;   in qat_dh_compute_value()
      316  if (sg_is_last(req->dst) && req->dst_len == ctx->p_size) {   in qat_dh_compute_value()
      528  areq->dst_len = req->ctx.rsa->key_sz;   in qat_rsa_cb()
      533  areq->dst_len, 1);   in qat_rsa_cb()
      661  if (req->dst_len < ctx->key_sz) {   in qat_rsa_enc()
      662  req->dst_len = ctx->key_sz;   in qat_rsa_enc()
      715  if (sg_is_last(req->dst) && req->dst_len == ctx->key_sz) {   in qat_rsa_enc()
      [all …]
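The qat entry (and likewise hpre and caampkc below) shows the akcipher convention for dst_len: if the caller's destination buffer is smaller than the key or modulus size, the driver writes the required size back into req->dst_len and fails; on completion the callback sets req->dst_len to the bytes actually produced. A sketch of the size check, with example_ctx standing in for the driver's per-key context (the -EOVERFLOW return matches what these drivers use, but treat the helper itself as hypothetical):

#include <crypto/akcipher.h>
#include <linux/errno.h>

/* example_ctx is a stand-in for the driver's per-key context. */
struct example_ctx {
	unsigned int key_sz;
};

static int example_rsa_check_dst(struct akcipher_request *req,
				 struct example_ctx *ctx)
{
	if (req->dst_len < ctx->key_sz) {
		/* tell the caller how much space it actually needs */
		req->dst_len = ctx->key_sz;
		return -EOVERFLOW;
	}

	/*
	 * ... submit the request; the completion callback later sets
	 * req->dst_len to the number of bytes written into req->dst ...
	 */
	return 0;
}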
|
/drivers/crypto/hisilicon/hpre/ |
D | hpre_crypto.c |
      426  areq->dst_len = ctx->key_sz;   in hpre_dh_cb()
      452  areq->dst_len = ctx->key_sz;   in hpre_rsa_cb()
      508  if (akreq->dst_len < ctx->key_sz) {   in hpre_msg_request_set()
      509  akreq->dst_len = ctx->key_sz;   in hpre_msg_request_set()
      522  if (kreq->dst_len < ctx->key_sz) {   in hpre_msg_request_set()
      523  kreq->dst_len = ctx->key_sz;   in hpre_msg_request_set()
      595  ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1);   in hpre_dh_compute_value()
      807  ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);   in hpre_rsa_enc()
      862  ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);   in hpre_rsa_dec()
      1455  areq->dst_len = ctx->key_sz << 1;   in hpre_ecdh_cb()
      [all …]
|
/drivers/crypto/ccp/ |
D | ccp-dmaengine.c |
      367  unsigned int dst_offset, dst_len;   in ccp_create_desc()   local
      387  dst_len = sg_dma_len(dst_sg);   in ccp_create_desc()
      405  if (!dst_len) {   in ccp_create_desc()
      414  dst_len = sg_dma_len(dst_sg);   in ccp_create_desc()
      419  len = min(dst_len, src_len);   in ccp_create_desc()
      452  dst_len -= len;   in ccp_create_desc()
|
D | ccp-crypto-rsa.c | 52 req->dst_len = rctx->cmd.u.rsa.key_size >> 3; in ccp_rsa_complete()
|
/drivers/media/usb/msi2500/ |
D | msi2500.c |
      245  unsigned int i, j, transactions, dst_len = 0;   in msi2500_convert_stream()   local
      281  dst_len += 1008;   in msi2500_convert_stream()
      303  dst_len += 1008;   in msi2500_convert_stream()
      313  dst_len += 984;   in msi2500_convert_stream()
      320  dst_len += 1008;   in msi2500_convert_stream()
      327  dst_len += 1008;   in msi2500_convert_stream()
      334  dst_len += 1008;   in msi2500_convert_stream()
      356  return dst_len;   in msi2500_convert_stream()
|
/drivers/net/netdevsim/ |
D | fib.c |
      283  fen_info->dst_len, AF_INET, fen_info->tb_id);   in nsim_fib4_rt_create()
      307  fen_info->dst_len, AF_INET,   in nsim_fib4_rt_lookup()
      325  fri.dst_len = fen_info->dst_len;   in nsim_fib4_rt_offload_failed_flag_set()
      339  int dst_len = fib4_rt->common.key.prefix_len;   in nsim_fib4_rt_hw_flags_set()   local
      345  fri.dst_len = dst_len;   in nsim_fib4_rt_hw_flags_set()
|
/drivers/crypto/caam/ |
D | caampkc.c |
      286  dst_nents = sg_nents_for_len(req->dst, req->dst_len);   in rsa_edesc_alloc()
      330  sg_to_sec4_sg_last(req->dst, req->dst_len,   in rsa_edesc_alloc()
      721  if (req->dst_len < key->n_sz) {   in caam_rsa_enc()
      722  req->dst_len = key->n_sz;   in caam_rsa_enc()
      845  if (req->dst_len < key->n_sz) {   in caam_rsa_dec()
      846  req->dst_len = key->n_sz;   in caam_rsa_dec()
|
D | caamhash.c |
      544  struct ahash_request *req, int dst_len)   in ahash_unmap()   argument
      564  struct ahash_request *req, int dst_len, u32 flag)   in ahash_unmap_ctx()   argument
      572  ahash_unmap(dev, edesc, req, dst_len);   in ahash_unmap_ctx()
      785  int dst_len, enum dma_data_direction dir)   in ahash_enqueue_req()   argument
      807  ahash_unmap_ctx(jrdev, edesc, req, dst_len, dir);   in ahash_enqueue_req()
|
D | caamalg_qi.c |
      947  int src_len, dst_len = 0;   in aead_edesc_alloc()   local
      989  dst_len = src_len + (encrypt ? authsize : (-authsize));   in aead_edesc_alloc()
      999  dst_nents = sg_nents_for_len(req->dst, dst_len);   in aead_edesc_alloc()
      1002  dst_len);   in aead_edesc_alloc()
      1115  sg_to_qm_sg_last(req->dst, dst_len, sg_table + qm_sg_index, 0);   in aead_edesc_alloc()
|
D | caamalg.c |
      1300  int src_len, dst_len = 0;   in aead_edesc_alloc()   local
      1307  dst_len = src_len + (encrypt ? authsize : (-authsize));   in aead_edesc_alloc()
      1316  dst_nents = sg_nents_for_len(req->dst, dst_len);   in aead_edesc_alloc()
      1319  dst_len);   in aead_edesc_alloc()
      1409  sg_to_sec4_sg_last(req->dst, dst_len,   in aead_edesc_alloc()
|
/drivers/crypto/ |
D | atmel-ecc.c |
      59  n_sz = min_t(size_t, ATMEL_ECC_NIST_P256_N_SIZE, req->dst_len);   in atmel_ecdh_done()
      153  nbytes = min_t(size_t, ATMEL_ECC_PUBKEY_SIZE, req->dst_len);   in atmel_ecdh_generate_public_key()
|
/drivers/media/usb/airspy/ |
D | airspy.c |
      234  unsigned int dst_len;   in airspy_convert_stream()   local
      238  dst_len = src_len;   in airspy_convert_stream()
      240  dst_len = 0;   in airspy_convert_stream()
      260  return dst_len;   in airspy_convert_stream()
|
/drivers/video/fbdev/omap2/omapfb/dss/ |
D | omapdss-boot-init.c | 60 static void __init omapdss_prefix_strcpy(char *dst, int dst_len, in omapdss_prefix_strcpy() argument
|
/drivers/media/dvb-frontends/ |
D | rtl2832_sdr.c |
      175  unsigned int dst_len;   in rtl2832_sdr_convert_stream()   local
      180  dst_len = src_len;   in rtl2832_sdr_convert_stream()
      188  dst_len = 2 * src_len;   in rtl2832_sdr_convert_stream()
      190  dst_len = 0;   in rtl2832_sdr_convert_stream()
      210  return dst_len;   in rtl2832_sdr_convert_stream()
|
/drivers/net/ethernet/mellanox/mlx5/core/steering/ |
D | dr_ste.h | 185 u8 dst_len,
|
/drivers/crypto/chelsio/ |
D | chcr_algo.c |
      2582  int src_len, dst_len;   in chcr_aead_dma_map()   local
      2590  dst_len = src_len;   in chcr_aead_dma_map()
      2593  dst_len = req->assoclen + req->cryptlen + (op_type ?   in chcr_aead_dma_map()
      2597  if (!req->cryptlen || !src_len || !dst_len)   in chcr_aead_dma_map()
      2620  sg_nents_for_len(req->dst, dst_len),   in chcr_aead_dma_map()
      2643  int src_len, dst_len;   in chcr_aead_dma_unmap()   local
      2651  dst_len = src_len;   in chcr_aead_dma_unmap()
      2654  dst_len = req->assoclen + req->cryptlen + (op_type ?   in chcr_aead_dma_unmap()
      2658  if (!req->cryptlen || !src_len || !dst_len)   in chcr_aead_dma_unmap()
      2672  sg_nents_for_len(req->dst, dst_len),   in chcr_aead_dma_unmap()
|
/drivers/net/ethernet/intel/ice/ |
D | ice_flex_pipe.c |
      3102  u32 dst_len, sect_len, offset = 0;   in ice_fill_tbl()   local
      3137  dst_len = hw->blk[block_id].xlt1.count *   in ice_fill_tbl()
      3150  dst_len = hw->blk[block_id].xlt2.count *   in ice_fill_tbl()
      3163  dst_len = hw->blk[block_id].prof.count *   in ice_fill_tbl()
      3176  dst_len = hw->blk[block_id].prof_redir.count *   in ice_fill_tbl()
      3190  dst_len = (u32)(hw->blk[block_id].es.count *   in ice_fill_tbl()
      3201  if (offset > dst_len)   in ice_fill_tbl()
      3209  if ((offset + sect_len) > dst_len)   in ice_fill_tbl()
      3210  sect_len = dst_len - offset;   in ice_fill_tbl()
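In ice_fill_tbl(), dst_len is the size of the destination table for the current block, and the section copy is clamped so it never runs past it. A short sketch of that clamp, using a hypothetical example_copy_clamped() helper:

#include <linux/string.h>
#include <linux/types.h>

/*
 * Hypothetical helper showing the clamp seen above: copy a package
 * section into the destination table without ever writing past
 * dst_len bytes.
 */
static void example_copy_clamped(u8 *dst, u32 dst_len, u32 offset,
				 const u8 *sect, u32 sect_len)
{
	if (offset > dst_len)
		return;				/* section starts past the table */

	if (offset + sect_len > dst_len)
		sect_len = dst_len - offset;	/* copy only what fits */

	memcpy(dst + offset, sect, sect_len);
}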
|
/drivers/net/ethernet/rocker/ |
D | rocker_ofdpa.c |
      2268  int dst_len, struct fib_info *fi, u32 tb_id,   in ofdpa_port_fib_ipv4()   argument
      2273  __be32 dst_mask = inet_make_mask(dst_len);   in ofdpa_port_fib_ipv4()
      2735  fen_info->dst_len, fen_info->fi,   in ofdpa_fib4_add()
      2758  fen_info->dst_len, fen_info->fi,   in ofdpa_fib4_del()
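Here, as in the other FIB-related entries (lag_mp, netdevsim fib.c), dst_len is an IPv4 prefix length in the range 0 to 32; rocker turns it into a netmask with inet_make_mask(). A minimal sketch of that conversion, wrapped in a hypothetical helper:

#include <linux/inetdevice.h>

/*
 * dst_len is an IPv4 prefix length: 24 -> 255.255.255.0.
 * inet_make_mask() is equivalent to
 *     dst_len ? htonl(~0u << (32 - dst_len)) : 0
 */
static __be32 example_prefix_to_mask(int dst_len)
{
	return inet_make_mask(dst_len);
}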
|