
Searched refs:bufcnt (Results 1 – 13 of 13) sorted by relevance

/drivers/crypto/aspeed/
aspeed-hace-hash.c
101 index = rctx->bufcnt & 0x3f; in aspeed_ahash_fill_padding()
103 *(rctx->buffer + rctx->bufcnt) = 0x80; in aspeed_ahash_fill_padding()
104 memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1); in aspeed_ahash_fill_padding()
105 memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 8); in aspeed_ahash_fill_padding()
106 rctx->bufcnt += padlen + 8; in aspeed_ahash_fill_padding()
112 index = rctx->bufcnt & 0x7f; in aspeed_ahash_fill_padding()
114 *(rctx->buffer + rctx->bufcnt) = 0x80; in aspeed_ahash_fill_padding()
115 memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1); in aspeed_ahash_fill_padding()
116 memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 16); in aspeed_ahash_fill_padding()
117 rctx->bufcnt += padlen + 16; in aspeed_ahash_fill_padding()
[all …]
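The aspeed-hace-hash.c hits above are the classic MD-style padding: a 0x80 marker, zero fill, and the message bit length appended at a block boundary, with bufcnt advanced past the padding. A minimal standalone sketch of that pattern for a 64-byte block follows; the demo_* names and buffer sizes are illustrative, not the driver's.

/*
 * Minimal sketch of 64-byte-block (SHA-1/SHA-256 style) padding driven
 * by a bufcnt-style counter, mirroring the pattern above.  Assumes at
 * most one partial block is buffered when padding starts.
 */
#include <stdint.h>
#include <string.h>

struct demo_hash_ctx {
        uint8_t  buffer[128];   /* partial block plus worst-case padding */
        size_t   bufcnt;        /* bytes currently buffered */
        uint64_t digcnt;        /* total message length in bytes */
};

static void demo_fill_padding(struct demo_hash_ctx *ctx)
{
        uint64_t bits = ctx->digcnt << 3;                 /* length in bits */
        size_t index = ctx->bufcnt & 0x3f;                /* offset inside the block */
        size_t padlen = (index < 56) ? (56 - index) : (64 + 56 - index);
        uint8_t lenbuf[8];
        int i;

        for (i = 0; i < 8; i++)                           /* big-endian bit count */
                lenbuf[i] = (uint8_t)(bits >> (56 - 8 * i));

        ctx->buffer[ctx->bufcnt] = 0x80;                  /* mandatory 1 bit */
        memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen - 1);
        memcpy(ctx->buffer + ctx->bufcnt + padlen, lenbuf, 8);
        ctx->bufcnt += padlen + 8;                        /* now a multiple of 64 */
}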
aspeed-hace.h
208 size_t bufcnt; /* buffer counter */ member
/drivers/crypto/
omap-sham.c
151 size_t bufcnt; member
639 if (ctx->bufcnt) in omap_sham_copy_sg_lists()
652 if (ctx->bufcnt) { in omap_sham_copy_sg_lists()
653 sg_set_buf(tmp, ctx->dd->xmit_buf, ctx->bufcnt); in omap_sham_copy_sg_lists()
656 new_len -= ctx->bufcnt; in omap_sham_copy_sg_lists()
690 ctx->offset += new_len - ctx->bufcnt; in omap_sham_copy_sg_lists()
691 ctx->bufcnt = 0; in omap_sham_copy_sg_lists()
711 if (ctx->bufcnt) in omap_sham_copy_sgs()
712 memcpy(buf, ctx->dd->xmit_buf, ctx->bufcnt); in omap_sham_copy_sgs()
714 scatterwalk_map_and_copy(buf + ctx->bufcnt, sg, ctx->offset, in omap_sham_copy_sgs()
[all …]
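The omap-sham.c hits show the usual two-step hand-off to DMA: any bytes already cached in the context (bufcnt of them) are either copied to the front of a flat bounce buffer or prepended as an extra scatterlist entry, and bufcnt is then cleared. A standalone sketch of the flat-copy variant, with illustrative demo_* names:

/*
 * Sketch of the bounce-buffer pattern in omap_sham_copy_sgs() above:
 * cached bytes go to the front of a flat buffer, the new request data
 * is copied in behind them, and bufcnt is cleared.
 */
#include <stdlib.h>
#include <string.h>

struct demo_sham_ctx {
        unsigned char *xmit_buf;   /* previously cached partial block */
        size_t bufcnt;             /* number of cached bytes */
};

static unsigned char *demo_flatten(struct demo_sham_ctx *ctx,
                                   const unsigned char *src, size_t new_len)
{
        unsigned char *buf = malloc(ctx->bufcnt + new_len);

        if (!buf)
                return NULL;

        if (ctx->bufcnt)                            /* cached bytes first */
                memcpy(buf, ctx->xmit_buf, ctx->bufcnt);

        memcpy(buf + ctx->bufcnt, src, new_len);    /* then the new data */
        ctx->bufcnt = 0;                            /* cache is consumed */
        return buf;
}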
atmel-sha.c
96 size_t bufcnt; member
307 while ((ctx->bufcnt < ctx->buflen) && ctx->total) { in atmel_sha_append_sg()
309 count = min(count, ctx->buflen - ctx->bufcnt); in atmel_sha_append_sg()
326 scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg, in atmel_sha_append_sg()
329 ctx->bufcnt += count; in atmel_sha_append_sg()
370 size[0] += ctx->bufcnt; in atmel_sha_fill_padding()
371 if (size[0] < ctx->bufcnt) in atmel_sha_fill_padding()
384 index = ctx->bufcnt & 0x7f; in atmel_sha_fill_padding()
386 *(ctx->buffer + ctx->bufcnt) = 0x80; in atmel_sha_fill_padding()
387 memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen-1); in atmel_sha_fill_padding()
[all …]
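The atmel-sha.c excerpt buffers request data until the context buffer holds buflen bytes, with bufcnt tracking the fill level; the padding branch then works on the 128-byte block mask (0x7f). A sketch of the append loop alone, with illustrative demo_* names and plain memcpy standing in for scatterwalk_map_and_copy():

/*
 * Sketch of the append loop in atmel_sha_append_sg() above: copy from
 * the request into the context buffer until the buffer is full or the
 * request is exhausted, with bufcnt tracking the buffered amount.
 */
#include <stddef.h>
#include <string.h>

struct demo_sha_ctx {
        unsigned char buffer[128];
        size_t buflen;     /* usable capacity, e.g. the block size */
        size_t bufcnt;     /* bytes currently buffered */
        size_t total;      /* bytes still pending in the request */
        size_t offset;     /* read position in the request data */
};

static void demo_append(struct demo_sha_ctx *ctx, const unsigned char *data)
{
        while (ctx->bufcnt < ctx->buflen && ctx->total) {
                size_t count = ctx->buflen - ctx->bufcnt;

                if (count > ctx->total)
                        count = ctx->total;        /* don't read past the request */

                memcpy(ctx->buffer + ctx->bufcnt, data + ctx->offset, count);
                ctx->bufcnt += count;
                ctx->offset += count;
                ctx->total  -= count;
        }
}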
s5p-sss.c
374 u32 bufcnt; member
1029 len = new_len + ctx->bufcnt; in s5p_hash_copy_sgs()
1039 if (ctx->bufcnt) in s5p_hash_copy_sgs()
1040 memcpy(buf, ctx->dd->xmit_buf, ctx->bufcnt); in s5p_hash_copy_sgs()
1042 scatterwalk_map_and_copy(buf + ctx->bufcnt, sg, ctx->skip, in s5p_hash_copy_sgs()
1048 ctx->bufcnt = 0; in s5p_hash_copy_sgs()
1076 if (ctx->bufcnt) in s5p_hash_copy_sg_lists()
1091 if (ctx->bufcnt) { in s5p_hash_copy_sg_lists()
1092 sg_set_buf(tmp, ctx->dd->xmit_buf, ctx->bufcnt); in s5p_hash_copy_sg_lists()
1188 if (ctx->bufcnt) { in s5p_hash_prepare_sgs()
[all …]
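s5p-sss.c follows the same scheme as omap-sham.c, and the copy_sg_lists path shows the scatterlist-prepend variant: the cached bufcnt bytes become the first element handed to DMA. A sketch of that idea expressed with plain iovecs (demo_* names are illustrative):

/*
 * Sketch of the prepend pattern in s5p_hash_copy_sg_lists() above,
 * using iovecs in place of a kernel scatterlist: cached bytes form the
 * first vector element, followed by the request's own segments.
 */
#include <sys/uio.h>
#include <stddef.h>

struct demo_s5p_ctx {
        void *xmit_buf;     /* cached partial block */
        size_t bufcnt;      /* number of cached bytes */
};

/* Build dst from the cached buffer (if any) plus the existing segments. */
static size_t demo_build_vec(struct demo_s5p_ctx *ctx, struct iovec *dst,
                             const struct iovec *src, size_t src_cnt)
{
        size_t n = 0, i;

        if (ctx->bufcnt) {                       /* cached bytes go first */
                dst[n].iov_base = ctx->xmit_buf;
                dst[n].iov_len  = ctx->bufcnt;
                n++;
        }

        for (i = 0; i < src_cnt; i++)            /* then the request data */
                dst[n++] = src[i];

        return n;                                /* number of entries used */
}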
img-hash.c
104 size_t bufcnt; member
215 if (ctx->bufcnt) { in img_hash_dma_callback()
216 img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0); in img_hash_dma_callback()
217 ctx->bufcnt = 0; in img_hash_dma_callback()
258 ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg), in img_hash_write_via_cpu()
262 ctx->bufcnt = 0; in img_hash_write_via_cpu()
405 ctx->bufcnt = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, in img_hash_dma_task()
409 while (ctx->sg && (ctx->bufcnt < 4)) { in img_hash_dma_task()
411 if (likely(len > (4 - ctx->bufcnt))) in img_hash_dma_task()
412 len = 4 - ctx->bufcnt; in img_hash_dma_task()
[all …]
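In img-hash.c the DMA engine only moves whole 32-bit words, so bufcnt holds the 0-3 leftover tail bytes, which the DMA completion callback pushes out by CPU before clearing the counter. A standalone sketch of that split, with illustrative demo_* names:

/*
 * Sketch of the tail handling shown above for img-hash: the DMA path
 * transfers whole words, the remainder is stashed with bufcnt, and the
 * completion path writes it out by CPU and resets the counter.
 */
#include <stddef.h>
#include <string.h>

struct demo_img_ctx {
        unsigned char buffer[4];
        size_t bufcnt;            /* 0..3 leftover bytes awaiting CPU write */
};

/* Returns the number of bytes DMA may transfer; stashes the remainder. */
static size_t demo_split_for_dma(struct demo_img_ctx *ctx,
                                 const unsigned char *data, size_t len)
{
        size_t dma_len = len & ~(size_t)3;        /* whole words only */

        ctx->bufcnt = len - dma_len;              /* at most 3 tail bytes */
        memcpy(ctx->buffer, data + dma_len, ctx->bufcnt);
        return dma_len;
}

/* Called when DMA completes: push the tail by CPU, then clear it. */
static void demo_dma_callback(struct demo_img_ctx *ctx,
                              void (*xmit_cpu)(const void *, size_t))
{
        if (ctx->bufcnt) {
                xmit_cpu(ctx->buffer, ctx->bufcnt);
                ctx->bufcnt = 0;
        }
}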
/drivers/net/hamradio/
baycom_epp.c
178 int bufcnt; member
188 int bufcnt; member
367 if (bc->hdlctx.bufcnt > 0) in encode_hdlc()
413 bc->hdlctx.bufcnt = wp - bc->hdlctx.buf; in encode_hdlc()
429 if (bc->hdlctx.bufcnt <= 0) in transmit()
431 if (bc->hdlctx.bufcnt <= 0) in transmit()
445 if (bc->hdlctx.state == tx_idle && bc->hdlctx.bufcnt > 0) { in transmit()
468 if (bc->hdlctx.bufcnt <= 0) { in transmit()
470 if (bc->hdlctx.bufcnt <= 0) { in transmit()
476 i = min_t(int, cnt, bc->hdlctx.bufcnt); in transmit()
[all …]
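In baycom_epp.c, bufcnt is transmit-side bookkeeping: encode_hdlc() leaves the number of encoded bytes in hdlctx.bufcnt and transmit() drains at most that many per pass. A sketch of the drain step, with illustrative demo_* names:

/*
 * Sketch of the transmit-side accounting shown above: bufcnt counts
 * encoded HDLC bytes still queued, and each pass sends at most that
 * many into the output FIFO.
 */
#include <stddef.h>
#include <string.h>

struct demo_hdlc_tx {
        unsigned char buf[512];
        unsigned char *bufptr;   /* next byte to send */
        int bufcnt;              /* encoded bytes still queued */
};

/* Copy up to cnt queued bytes into the output FIFO buffer. */
static int demo_transmit(struct demo_hdlc_tx *tx, unsigned char *fifo, int cnt)
{
        int i;

        if (tx->bufcnt <= 0)
                return 0;                 /* nothing encoded yet */

        i = cnt < tx->bufcnt ? cnt : tx->bufcnt;
        memcpy(fifo, tx->bufptr, i);
        tx->bufptr += i;
        tx->bufcnt -= i;
        return i;                         /* bytes actually handed to hardware */
}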
/drivers/crypto/stm32/
stm32-hash.c
131 size_t bufcnt; member
301 while ((rctx->bufcnt < rctx->buflen) && rctx->total) { in stm32_hash_append_sg()
303 count = min(count, rctx->buflen - rctx->bufcnt); in stm32_hash_append_sg()
314 scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg, in stm32_hash_append_sg()
317 rctx->bufcnt += count; in stm32_hash_append_sg()
383 int bufcnt, err = 0, final; in stm32_hash_update_cpu() local
390 (rctx->bufcnt + rctx->total >= rctx->buflen)) { in stm32_hash_update_cpu()
392 bufcnt = rctx->bufcnt; in stm32_hash_update_cpu()
393 rctx->bufcnt = 0; in stm32_hash_update_cpu()
394 err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt, 0); in stm32_hash_update_cpu()
[all …]
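stm32-hash.c uses bufcnt the same way as the atmel driver, and the update path shows the flush decision: once the buffer fills (or the request is final), the collected bufcnt bytes are fed to the engine and the counter is reset before the transfer. A standalone sketch of that update logic, with illustrative demo_* names:

/*
 * Sketch of the buffer-and-flush logic in stm32_hash_update_cpu()
 * above: append into the context buffer, flush a full buffer to the
 * engine, and flush the remaining tail when the request is final.
 */
#include <stddef.h>
#include <string.h>

struct demo_stm32_ctx {
        unsigned char buffer[64];
        size_t buflen;            /* buffer capacity */
        size_t bufcnt;            /* bytes currently buffered */
};

typedef void (*demo_xmit_fn)(const void *data, size_t len, int final);

static void demo_update(struct demo_stm32_ctx *ctx, const unsigned char *data,
                        size_t len, int final, demo_xmit_fn xmit)
{
        while (len) {
                size_t count = ctx->buflen - ctx->bufcnt;

                if (count > len)
                        count = len;

                memcpy(ctx->buffer + ctx->bufcnt, data, count);
                ctx->bufcnt += count;
                data += count;
                len -= count;

                if (ctx->bufcnt == ctx->buflen) {  /* buffer full: flush it */
                        size_t bufcnt = ctx->bufcnt;

                        ctx->bufcnt = 0;
                        xmit(ctx->buffer, bufcnt, 0);
                }
        }

        if (final && ctx->bufcnt) {                /* flush the final tail */
                size_t bufcnt = ctx->bufcnt;

                ctx->bufcnt = 0;
                xmit(ctx->buffer, bufcnt, 1);
        }
}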
/drivers/scsi/mpi3mr/
mpi3mr_app.c
582 struct mpi3mr_buf_map *drv_bufs, u8 bufcnt, u8 is_rmc, in mpi3mr_bsg_build_sgl() argument
616 for (; count < bufcnt; count++, drv_buf_iter++) { in mpi3mr_bsg_build_sgl()
664 struct mpi3mr_buf_map *drv_bufs, u8 bufcnt) in mpi3mr_build_nvme_sgl() argument
680 for (count = 0; count < bufcnt; count++, drv_buf_iter++) { in mpi3mr_build_nvme_sgl()
723 struct mpi3mr_buf_map *drv_bufs, u8 bufcnt) in mpi3mr_build_nvme_prp() argument
763 for (count = 0; count < bufcnt; count++, drv_buf_iter++) { in mpi3mr_build_nvme_prp()
967 u8 count, bufcnt = 0, is_rmcb = 0, is_rmrb = 0, din_cnt = 0, dout_cnt = 0; in mpi3mr_bsg_process_mpt_cmds() local
997 bufcnt = karg->buf_entry_list.num_of_entries; in mpi3mr_bsg_process_mpt_cmds()
998 drv_bufs = kzalloc((sizeof(*drv_bufs) * bufcnt), GFP_KERNEL); in mpi3mr_bsg_process_mpt_cmds()
1027 for (count = 0; count < bufcnt; count++, buf_entries++, drv_buf_iter++) { in mpi3mr_bsg_process_mpt_cmds()
[all …]
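In mpi3mr_app.c, bufcnt is not a byte count but the number of user buffers in a BSG request: one tracking structure is allocated per entry, and the SGL/PRP builders walk the same bufcnt-sized array. A sketch of that allocate-and-walk pattern, with illustrative demo_* names:

/*
 * Sketch of the buffer-table handling shown above: allocate one
 * tracking element per buffer described by the request, then have the
 * SGE/PRP builders iterate over the bufcnt-sized array.
 */
#include <stdlib.h>
#include <stdint.h>

struct demo_buf_map {
        void     *kern_buf;
        uint32_t  kern_buf_len;
};

static struct demo_buf_map *demo_alloc_bufs(uint8_t bufcnt)
{
        /* one zeroed tracking element per buffer in the request */
        return calloc(bufcnt, sizeof(struct demo_buf_map));
}

static void demo_walk_bufs(struct demo_buf_map *drv_bufs, uint8_t bufcnt,
                           void (*build_sge)(struct demo_buf_map *))
{
        struct demo_buf_map *iter = drv_bufs;
        uint8_t count;

        for (count = 0; count < bufcnt; count++, iter++)
                build_sge(iter);          /* add one SGE/PRP per buffer */
}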
/drivers/mailbox/
bcm-pdc-mailbox.c
718 u32 bufcnt; /* Number of bytes of buffer pointed to by descriptor */ in pdc_tx_list_sg_add() local
749 bufcnt = sg_dma_len(sg); in pdc_tx_list_sg_add()
751 while (bufcnt > PDC_DMA_BUF_MAX) { in pdc_tx_list_sg_add()
755 bufcnt -= PDC_DMA_BUF_MAX; in pdc_tx_list_sg_add()
766 pdc_build_txd(pdcs, databufptr, bufcnt, flags | eot); in pdc_tx_list_sg_add()
885 u32 bufcnt; /* Number of bytes of buffer pointed to by descriptor */ in pdc_rx_list_sg_add() local
907 bufcnt = sg_dma_len(sg); in pdc_rx_list_sg_add()
909 while (bufcnt > PDC_DMA_BUF_MAX) { in pdc_rx_list_sg_add()
912 bufcnt -= PDC_DMA_BUF_MAX; in pdc_rx_list_sg_add()
919 pdc_build_rxd(pdcs, databufptr, bufcnt, flags); in pdc_rx_list_sg_add()
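bcm-pdc-mailbox.c uses bufcnt as the number of bytes of a scatterlist element not yet covered by a descriptor: anything longer than PDC_DMA_BUF_MAX is carved into several descriptors. A sketch of that splitting loop; the demo_* names and DEMO_DMA_BUF_MAX are illustrative stand-ins:

/*
 * Sketch of the descriptor-splitting loop in pdc_tx_list_sg_add() and
 * pdc_rx_list_sg_add() above: bufcnt counts the bytes still to be
 * described, and oversized elements are split across descriptors.
 */
#include <stdint.h>

#define DEMO_DMA_BUF_MAX 16384u   /* stand-in for the hardware limit */

typedef void (*demo_build_desc_fn)(uint64_t addr, uint32_t len);

static void demo_add_sg_element(uint64_t dma_addr, uint32_t dma_len,
                                demo_build_desc_fn build_desc)
{
        uint32_t bufcnt = dma_len;    /* bytes not yet covered by a descriptor */

        while (bufcnt > DEMO_DMA_BUF_MAX) {
                build_desc(dma_addr, DEMO_DMA_BUF_MAX);
                dma_addr += DEMO_DMA_BUF_MAX;
                bufcnt   -= DEMO_DMA_BUF_MAX;
        }

        if (bufcnt)
                build_desc(dma_addr, bufcnt);   /* final short descriptor */
}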
/drivers/net/ethernet/sgi/
ioc3-eth.c
613 u32 cmd, bufcnt, len; in ioc3_tx_unmap() local
617 bufcnt = be32_to_cpu(desc->bufcnt); in ioc3_tx_unmap()
619 len = (bufcnt & ETXD_B1CNT_MASK) >> ETXD_B1CNT_SHIFT; in ioc3_tx_unmap()
624 len = (bufcnt & ETXD_B2CNT_MASK) >> ETXD_B2CNT_SHIFT; in ioc3_tx_unmap()
1052 desc->bufcnt = cpu_to_be32(len); in ioc3_start_xmit()
1061 desc->bufcnt = cpu_to_be32((s1 << ETXD_B1CNT_SHIFT) | in ioc3_start_xmit()
1078 desc->bufcnt = cpu_to_be32(len << ETXD_B1CNT_SHIFT); in ioc3_start_xmit()
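In ioc3-eth.c, bufcnt is a 32-bit big-endian descriptor word that packs one or two buffer lengths, recovered with shift/mask pairs on the unmap path. A sketch of that packing; the DEMO_* shifts and masks are illustrative stand-ins for the driver's ETXD_B1CNT/ETXD_B2CNT definitions, whose exact values are not shown here:

/*
 * Sketch of a packed, big-endian length field like the bufcnt word in
 * the ioc3-eth descriptors above.
 */
#include <stdint.h>
#include <arpa/inet.h>   /* htonl/ntohl as a portable be32 conversion */

#define DEMO_B1CNT_SHIFT 0
#define DEMO_B1CNT_MASK  (0x7ffu << DEMO_B1CNT_SHIFT)
#define DEMO_B2CNT_SHIFT 16
#define DEMO_B2CNT_MASK  (0x7ffu << DEMO_B2CNT_SHIFT)

static uint32_t demo_pack_bufcnt(uint32_t len1, uint32_t len2)
{
        return htonl((len1 << DEMO_B1CNT_SHIFT) | (len2 << DEMO_B2CNT_SHIFT));
}

static void demo_unpack_bufcnt(uint32_t be_bufcnt, uint32_t *len1, uint32_t *len2)
{
        uint32_t bufcnt = ntohl(be_bufcnt);

        *len1 = (bufcnt & DEMO_B1CNT_MASK) >> DEMO_B1CNT_SHIFT;
        *len2 = (bufcnt & DEMO_B2CNT_MASK) >> DEMO_B2CNT_SHIFT;
}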
/drivers/block/aoe/
aoe.h
66 __be16 bufcnt; member
aoecmd.c
1551 n = be16_to_cpu(ch->bufcnt); in aoecmd_cfg_rsp()
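For AoE, bufcnt is a 16-bit big-endian field in the config response header, converted with be16_to_cpu() before use as the target's outstanding-buffer count. A standalone sketch of that conversion, with illustrative demo_* names:

/*
 * Sketch of the wire-format field shown above: bufcnt arrives as a
 * 16-bit big-endian value and is converted to host order before use;
 * ntohs() gives the same big-endian-to-host step as be16_to_cpu().
 */
#include <stdint.h>
#include <arpa/inet.h>

struct demo_aoe_cfghdr {
        uint16_t bufcnt;      /* big-endian on the wire */
        /* ... remaining config-response fields elided ... */
};

static unsigned int demo_read_bufcnt(const struct demo_aoe_cfghdr *ch)
{
        return ntohs(ch->bufcnt);   /* outstanding buffers the target allows */
}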