/drivers/nvdimm/ |
D | blk.c |
    80    unsigned int cur_len;   in nd_blk_rw_integrity() local
    91    cur_len = min(len, bv.bv_len);   in nd_blk_rw_integrity()
    94    cur_len, rw);   in nd_blk_rw_integrity()
    99    len -= cur_len;   in nd_blk_rw_integrity()
    100   dev_offset += cur_len;   in nd_blk_rw_integrity()
    101   if (!bvec_iter_advance(bip->bip_vec, &bip->bip_iter, cur_len))   in nd_blk_rw_integrity()
    130   unsigned int cur_len;   in nsblk_do_bvec() local
    138   cur_len = bip ? min(len, sector_size) : len;   in nsblk_do_bvec()
    142   dev_offset = to_dev_offset(nsblk, ns_offset, cur_len);   in nsblk_do_bvec()
    147   err = ndbr->do_io(ndbr, dev_offset, iobuf + off, cur_len, rw);   in nsblk_do_bvec()
    [all …]
|
D | btt.c |
    1155  unsigned int cur_len;   in btt_rw_integrity() local
    1166  cur_len = min(len, bv.bv_len);   in btt_rw_integrity()
    1170  mem + bv.bv_offset, cur_len,   in btt_rw_integrity()
    1174  mem + bv.bv_offset, cur_len,   in btt_rw_integrity()
    1181  len -= cur_len;   in btt_rw_integrity()
    1182  meta_nsoff += cur_len;   in btt_rw_integrity()
    1183  if (!bvec_iter_advance(bip->bip_vec, &bip->bip_iter, cur_len))   in btt_rw_integrity()
    1208  u32 cur_len;   in btt_read_pg() local
    1216  cur_len = min(btt->sector_size, len);   in btt_read_pg()
    1233  zero_fill_data(page, off, cur_len);   in btt_read_pg()
    [all …]
|
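Both nvdimm hits above follow the same shape: each pass clamps the work to min() of what is left and what the current bio segment or sector can take, then advances every cursor by cur_len. The user-space C sketch below shows only that bookkeeping; SEG_MAX, min_size() and the plain memcpy() are stand-ins for the driver's bvec iteration and do_io() callback, not the real code.

#include <stddef.h>
#include <stdio.h>
#include <string.h>

#define SEG_MAX 512u  /* stand-in for bv.bv_len / btt->sector_size */

static size_t min_size(size_t a, size_t b)
{
    return a < b ? a : b;
}

/*
 * Copy len bytes in chunks of at most SEG_MAX, advancing every cursor
 * by cur_len on each pass, the same bookkeeping the nvdimm integrity
 * paths do with len, dev_offset and the bvec iterator.
 */
static void chunk_copy(unsigned char *dst, const unsigned char *src, size_t len)
{
    while (len) {
        size_t cur_len = min_size(len, SEG_MAX);

        memcpy(dst, src, cur_len);
        dst += cur_len;
        src += cur_len;
        len -= cur_len;
    }
}

int main(void)
{
    unsigned char src[2000], dst[2000];

    memset(src, 0xab, sizeof(src));
    chunk_copy(dst, src, sizeof(src));
    printf("copied in %u-byte chunks: %s\n", SEG_MAX,
           memcmp(dst, src, sizeof(dst)) ? "mismatch" : "ok");
    return 0;
}

The same clamp-and-advance loop recurs in most of the entries below (spi-sh, mt76 mcu, rmi_smbus, gve, bnx2x, pata_mpc52xx, ipr, the iSCSI targets); only the per-chunk limit changes.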
/drivers/spi/ |
D | spi-sh.c |
    158   int cur_len;   in spi_sh_send() local
    167   cur_len = min(SPI_SH_FIFO_SIZE, remain);   in spi_sh_send()
    168   for (i = 0; i < cur_len &&   in spi_sh_send()
    182   cur_len = i;   in spi_sh_send()
    184   remain -= cur_len;   in spi_sh_send()
    185   data += cur_len;   in spi_sh_send()
    223   int cur_len;   in spi_sh_receive() local
    252   cur_len = min(SPI_SH_FIFO_SIZE, remain);   in spi_sh_receive()
    253   for (i = 0; i < cur_len; i++) {   in spi_sh_receive()
    259   remain -= cur_len;   in spi_sh_receive()
    [all …]
|
D | spi-tegra20-sflash.c | 116 unsigned cur_len; member
|
D | spi-tegra20-slink.c | 166 unsigned cur_len; member
|
/drivers/net/wireless/mediatek/mt76/ |
D | mcu.c |
    112   int err, cur_len;   in mt76_mcu_send_firmware() local
    115   cur_len = min_t(int, 4096 - dev->mcu_ops->headroom, len);   in mt76_mcu_send_firmware()
    117   err = mt76_mcu_send_msg(dev, cmd, data, cur_len, false);   in mt76_mcu_send_firmware()
    121   data += cur_len;   in mt76_mcu_send_firmware()
    122   len -= cur_len;   in mt76_mcu_send_firmware()
|
D | mt76x02_dfs.c |
    428   u16 cur_len)   in mt76x02_dfs_create_sequence() argument
    497   if (seq.count <= cur_len)   in mt76x02_dfs_create_sequence()
|
/drivers/input/rmi4/ |
D | rmi_smbus.c |
    143   int cur_len = (int)len;   in rmi_smb_write_block() local
    147   while (cur_len > 0) {   in rmi_smb_write_block()
    164   cur_len -= SMB_MAX_COUNT;   in rmi_smb_write_block()
    196   int cur_len = (int)len;   in rmi_smb_read_block() local
    201   while (cur_len > 0) {   in rmi_smb_read_block()
    203   int block_len = min_t(int, cur_len, SMB_MAX_COUNT);   in rmi_smb_read_block()
    216   cur_len -= SMB_MAX_COUNT;   in rmi_smb_read_block()
|
/drivers/md/ |
D | dm-ebs-target.c |
    68    unsigned int cur_len;   in __ebs_rw_bvec() local
    81    cur_len = min(dm_bufio_get_block_size(ec->bufio) - buf_off, bv_len);   in __ebs_rw_bvec()
    99    memcpy(pa, ba, cur_len);   in __ebs_rw_bvec()
    103   memcpy(ba, pa, cur_len);   in __ebs_rw_bvec()
    104   dm_bufio_mark_partial_buffer_dirty(b, buf_off, buf_off + cur_len);   in __ebs_rw_bvec()
    110   pa += cur_len;   in __ebs_rw_bvec()
    111   bv_len -= cur_len;   in __ebs_rw_bvec()
|
/drivers/platform/mellanox/ |
D | mlxbf-tmfifo.c |
    77    int cur_len;   member
    589   if (vring->cur_len + sizeof(u64) <= len) {   in mlxbf_tmfifo_rxtx_word()
    593   memcpy(addr + vring->cur_len, &data,   in mlxbf_tmfifo_rxtx_word()
    596   memcpy(&data, addr + vring->cur_len,   in mlxbf_tmfifo_rxtx_word()
    599   vring->cur_len += sizeof(u64);   in mlxbf_tmfifo_rxtx_word()
    604   memcpy(addr + vring->cur_len, &data,   in mlxbf_tmfifo_rxtx_word()
    605   len - vring->cur_len);   in mlxbf_tmfifo_rxtx_word()
    608   memcpy(&data, addr + vring->cur_len,   in mlxbf_tmfifo_rxtx_word()
    609   len - vring->cur_len);   in mlxbf_tmfifo_rxtx_word()
    611   vring->cur_len = len;   in mlxbf_tmfifo_rxtx_word()
    [all …]
|
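The tmfifo excerpt uses vring->cur_len as a progress cursor through a buffer that is moved one 64-bit word at a time: whole 8-byte copies while they fit, then one short tail copy that brings the cursor up to len. A minimal sketch of just that split follows; the returned uint64_t stands in for the actual FIFO register access, so this illustrates the cursor logic rather than the driver itself.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/*
 * Pack the next word of buf for a 64-bit FIFO-style register, advancing
 * *cur_len: a full 8-byte copy while one fits, otherwise a short tail
 * copy that sets *cur_len to len. Mirrors the full-word/tail split seen
 * in the mlxbf_tmfifo_rxtx_word() excerpt.
 */
static uint64_t fifo_tx_word(const uint8_t *buf, int len, int *cur_len)
{
    uint64_t data = 0;

    if (*cur_len + (int)sizeof(uint64_t) <= len) {
        memcpy(&data, buf + *cur_len, sizeof(uint64_t));
        *cur_len += sizeof(uint64_t);
    } else {
        memcpy(&data, buf + *cur_len, len - *cur_len);
        *cur_len = len;
    }
    return data;
}

int main(void)
{
    const uint8_t msg[11] = "hello fifo";
    int cur_len = 0;

    while (cur_len < (int)sizeof(msg))
        printf("word: 0x%016llx\n",
               (unsigned long long)fifo_tx_word(msg, sizeof(msg), &cur_len));
    return 0;
}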
/drivers/mmc/host/ |
D | mmci_stm32_sdmmc.c |
    422   int cur_len = 0, max_len = 0, end_of_len = 0;   in sdmmc_dlyb_phase_tuning() local
    429   cur_len = 0;   in sdmmc_dlyb_phase_tuning()
    431   cur_len++;   in sdmmc_dlyb_phase_tuning()
    432   if (cur_len > max_len) {   in sdmmc_dlyb_phase_tuning()
    433   max_len = cur_len;   in sdmmc_dlyb_phase_tuning()
|
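The sdmmc tuning loop is a longest-run search: cur_len counts consecutive delay-block phases that pass, and the length and end index of the best run are kept so the middle of that window can be used. A self-contained sketch of the same bookkeeping, assuming a hypothetical phase_ok[] array already filled in by a prior tuning pass:

#include <stdio.h>

/*
 * Return the midpoint of the longest run of passing phases, or -1 if
 * none passed. Mirrors the cur_len/max_len/end_of_len accounting in
 * the sdmmc_dlyb_phase_tuning() excerpt; phase_ok[] is assumed input.
 */
static int longest_ok_window(const int *phase_ok, int nr_phases)
{
    int cur_len = 0, max_len = 0, end_of_len = 0;
    int i;

    for (i = 0; i < nr_phases; i++) {
        if (!phase_ok[i]) {
            cur_len = 0;
            continue;
        }
        cur_len++;
        if (cur_len > max_len) {
            max_len = cur_len;
            end_of_len = i;
        }
    }

    if (!max_len)
        return -1;

    return end_of_len - max_len / 2;  /* middle of the best window */
}

int main(void)
{
    /* 1 = phase passed tuning, 0 = failed (made-up sample data) */
    const int phase_ok[12] = { 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0 };

    printf("best phase: %d\n", longest_ok_window(phase_ok, 12));
    return 0;
}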
/drivers/usb/musb/ |
D | ux500_dma.c |
    35    unsigned int cur_len;   member
    63    ux500_channel->channel.actual_len = ux500_channel->cur_len;   in ux500_dma_callback()
    91    ux500_channel->cur_len = len;   in ux500_configure_channel()
|
/drivers/iommu/ |
D | dma-iommu.c |
    910   unsigned int cur_len = 0, max_len = dma_get_max_seg_size(dev);   in __finalise_sg() local
    931   if (cur_len && !s_iova_off && (dma_addr & seg_mask) &&   in __finalise_sg()
    932   (max_len - cur_len >= s_length)) {   in __finalise_sg()
    934   cur_len += s_length;   in __finalise_sg()
    939   cur_len = s_length;   in __finalise_sg()
    945   sg_dma_len(cur) = cur_len;   in __finalise_sg()
    949   cur_len = 0;   in __finalise_sg()
|
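__finalise_sg() concatenates contiguous mappings into fewer DMA segments as long as the combined length stays within dma_get_max_seg_size(); cur_len is the length accumulated into the segment being built. The sketch below keeps only the address-contiguity test and the max-length rule and drops the segment-boundary and offset checks of the real code, so it illustrates the accumulate-or-flush idea rather than the IOMMU path itself.

#include <stdint.h>
#include <stdio.h>

struct extent {
    uint64_t addr;
    uint32_t len;
};

/*
 * Merge adjacent extents in place while the merged length still fits
 * max_len, as the cur_len accounting in the __finalise_sg() excerpt
 * does for scatterlist segments. Returns the new extent count.
 */
static size_t merge_extents(struct extent *e, size_t n, uint32_t max_len)
{
    size_t out = 0, i;

    for (i = 0; i < n; i++) {
        if (out &&
            e[out - 1].addr + e[out - 1].len == e[i].addr &&
            max_len - e[out - 1].len >= e[i].len)
            e[out - 1].len += e[i].len;  /* grow the current segment */
        else
            e[out++] = e[i];             /* flush and start a new one */
    }
    return out;
}

int main(void)
{
    struct extent e[] = {
        { 0x1000, 0x1000 }, { 0x2000, 0x1000 },  /* contiguous: merged */
        { 0x4000, 0x0800 },                      /* gap: new segment   */
    };
    size_t i, n = merge_extents(e, 3, 0x10000);

    for (i = 0; i < n; i++)
        printf("seg %zu: addr 0x%llx len 0x%x\n", i,
               (unsigned long long)e[i].addr, (unsigned)e[i].len);
    return 0;
}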
/drivers/net/ethernet/google/gve/ |
D | gve_tx_dqo.c |
    332   u32 cur_len = min_t(u32, len, GVE_TX_MAX_BUF_SIZE_DQO);   in gve_tx_fill_pkt_desc_dqo() local
    333   bool cur_eop = eop && cur_len == len;   in gve_tx_fill_pkt_desc_dqo()
    341   .buf_size = cur_len,   in gve_tx_fill_pkt_desc_dqo()
    344   addr += cur_len;   in gve_tx_fill_pkt_desc_dqo()
    345   len -= cur_len;   in gve_tx_fill_pkt_desc_dqo()
|
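The gve path splits one mapped buffer into descriptors of at most GVE_TX_MAX_BUF_SIZE_DQO bytes and raises end-of-packet only on the piece that finishes the buffer (cur_eop = eop && cur_len == len). Below is a hedged sketch of that split; struct desc, BUF_MAX and the emit() callback are invented for illustration and do not match the driver's real descriptor layout.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define BUF_MAX 16384u  /* stand-in for GVE_TX_MAX_BUF_SIZE_DQO */

struct desc {
    uint64_t addr;
    uint32_t len;
    bool eop;
};

/*
 * Split [addr, addr + len) into descriptors of at most BUF_MAX bytes,
 * setting eop only on the chunk that completes the packet, as in the
 * gve_tx_fill_pkt_desc_dqo() excerpt.
 */
static void fill_descs(uint64_t addr, uint32_t len, bool eop,
                       void (*emit)(const struct desc *))
{
    while (len) {
        uint32_t cur_len = len < BUF_MAX ? len : BUF_MAX;
        struct desc d = {
            .addr = addr,
            .len  = cur_len,
            .eop  = eop && cur_len == len,
        };

        emit(&d);
        addr += cur_len;
        len  -= cur_len;
    }
}

static void print_desc(const struct desc *d)
{
    printf("desc: addr=0x%llx len=%u eop=%d\n",
           (unsigned long long)d->addr, (unsigned)d->len, d->eop);
}

int main(void)
{
    fill_descs(0x100000, 40000, true, print_desc);
    return 0;
}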
/drivers/net/ethernet/atheros/ |
D | ag71xx.c |
    1532  unsigned int cur_len = len;   in ag71xx_fill_dma_desc() local
    1540  if (cur_len > split) {   in ag71xx_fill_dma_desc()
    1541  cur_len = split;   in ag71xx_fill_dma_desc()
    1547  cur_len -= 4;   in ag71xx_fill_dma_desc()
    1551  addr += cur_len;   in ag71xx_fill_dma_desc()
    1552  len -= cur_len;   in ag71xx_fill_dma_desc()
    1555  cur_len |= DESC_MORE;   in ag71xx_fill_dma_desc()
    1559  cur_len |= DESC_EMPTY;   in ag71xx_fill_dma_desc()
    1561  desc->ctrl = cur_len;   in ag71xx_fill_dma_desc()
|
/drivers/net/ethernet/broadcom/bnx2x/ |
D | bnx2x_init_ops.h |
    94    u32 cur_len = min(buf_len32, len - i);   in bnx2x_init_fill() local
    96    bnx2x_write_big_buf(bp, addr + i*4, cur_len, wb);   in bnx2x_init_fill()
    133   u32 cur_len = min(buf_len32, len - i);   in bnx2x_init_wr_64() local
    135   bnx2x_write_big_buf_wb(bp, addr + i*4, cur_len);   in bnx2x_init_wr_64()
|
D | bnx2x_sp.c |
    180   int cur_len = 0, rc;   in bnx2x_exe_queue_step() local
    207   if (cur_len + elem->cmd_len <= o->exe_chunk_len) {   in bnx2x_exe_queue_step()
    208   cur_len += elem->cmd_len;   in bnx2x_exe_queue_step()
    222   if (!cur_len)   in bnx2x_exe_queue_step()
|
/drivers/target/ |
D | target_core_configfs.c |
    2669  ssize_t len = 0, cur_len;   in target_lu_gp_members_show() local
    2677  cur_len = snprintf(buf, LU_GROUP_NAME_BUF, "%s/%s\n",   in target_lu_gp_members_show()
    2680  cur_len++; /* Extra byte for NULL terminator */   in target_lu_gp_members_show()
    2682  if ((cur_len + len) > PAGE_SIZE) {   in target_lu_gp_members_show()
    2687  memcpy(page+len, buf, cur_len);   in target_lu_gp_members_show()
    2688  len += cur_len;   in target_lu_gp_members_show()
    3067  ssize_t len = 0, cur_len;   in target_tg_pt_gp_members_show() local
    3075  cur_len = snprintf(buf, TG_PT_GROUP_NAME_BUF, "%s/%s/tpgt_%hu"   in target_tg_pt_gp_members_show()
    3080  cur_len++; /* Extra byte for NULL terminator */   in target_tg_pt_gp_members_show()
    3082  if ((cur_len + len) > PAGE_SIZE) {   in target_tg_pt_gp_members_show()
    [all …]
|
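Both configfs *_members_show() helpers format one line per member into a scratch buffer and append it to the output page only while it still fits, so the attribute text cannot overrun PAGE_SIZE. A simplified user-space sketch of that bounded accumulation follows; the scratch-buffer size, the placeholder names and the exact byte accounting are illustrative, not the kernel's.

#include <stdio.h>
#include <string.h>

#define PAGE_SZ 4096u

/*
 * Append one formatted line per name to page[], stopping as soon as
 * the next line would no longer fit, mirroring the cur_len/len checks
 * in the target_*_members_show() excerpts. Returns the bytes used.
 */
static size_t fill_members_page(char *page, const char *const *names, size_t n)
{
    char buf[128];
    size_t len = 0, i;

    for (i = 0; i < n; i++) {
        int cur_len = snprintf(buf, sizeof(buf), "%s\n", names[i]);

        if (cur_len < 0 || len + (size_t)cur_len >= PAGE_SZ)
            break;               /* out of buffer space, stop here */
        memcpy(page + len, buf, (size_t)cur_len);
        len += (size_t)cur_len;
    }
    page[len] = '\0';
    return len;
}

int main(void)
{
    static const char *const names[] = {
        "hba0/dev0", "hba0/dev1", "hba1/dev0",  /* placeholder members */
    };
    char page[PAGE_SZ];
    size_t used = fill_members_page(page, names, 3);

    printf("%zu bytes:\n%s", used, page);
    return 0;
}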
/drivers/ata/ |
D | pata_mpc52xx.c |
    459   u32 cur_len = sg_dma_len(sg);   in mpc52xx_ata_build_dmatable() local
    461   while (cur_len) {   in mpc52xx_ata_build_dmatable()
    462   unsigned int tc = min(cur_len, MAX_DMA_BUFFER_SIZE);   in mpc52xx_ata_build_dmatable()
    481   cur_len -= tc;   in mpc52xx_ata_build_dmatable()
|
/drivers/target/iscsi/cxgbit/ |
D | cxgbit_target.c |
    359   u32 cur_len = min_t(u32, data_length, sg->length - page_off);   in cxgbit_map_skb() local
    365   cur_len);   in cxgbit_map_skb()
    366   skb->data_len += cur_len;   in cxgbit_map_skb()
    367   skb->len += cur_len;   in cxgbit_map_skb()
    368   skb->truesize += cur_len;   in cxgbit_map_skb()
    370   data_length -= cur_len;   in cxgbit_map_skb()
|
/drivers/target/iscsi/ |
D | iscsi_target.c |
    909   u32 cur_len;   in iscsit_map_iovec() local
    914   cur_len = min_t(u32, data_length, sg->length - page_off);   in iscsit_map_iovec()
    917   iov[i].iov_len = cur_len;   in iscsit_map_iovec()
    919   data_length -= cur_len;   in iscsit_map_iovec()
    1407  u32 cur_len = min_t(u32, data_length, sg->length);   in iscsit_do_crypto_hash_sg() local
    1409  ahash_request_set_crypt(hash, sg, NULL, cur_len);   in iscsit_do_crypto_hash_sg()
    1412  data_length -= cur_len;   in iscsit_do_crypto_hash_sg()
|
/drivers/staging/media/atomisp/pci/ |
D | atomisp_ioctl.c |
    590   const int cur_len = strlen(input->name);   in atomisp_enum_input() local
    591   const int max_size = sizeof(input->name) - cur_len - 1;   in atomisp_enum_input()
    594   input->name[cur_len] = '+';   in atomisp_enum_input()
    595   strscpy(&input->name[cur_len + 1],   in atomisp_enum_input()
|
/drivers/gpu/drm/ |
D | drm_dp_mst_topology.c |
    449   raw->cur_len = idx;   in drm_dp_encode_sideband_req()
    698   txmsg->cur_offset, txmsg->cur_len, txmsg->seqno,   in drm_dp_mst_dump_sideband_msg_tx()
    739   raw->cur_len = idx;   in drm_dp_encode_sideband_reply()
    2855  len = txmsg->cur_len - txmsg->cur_offset;   in process_single_tx_qlock()
    2861  if (len == txmsg->cur_len)   in process_single_tx_qlock()
    2886  if (txmsg->cur_offset == txmsg->cur_len) {   in process_single_tx_qlock()
|
/drivers/scsi/ |
D | ipr.c |
    2970  int cur_len, rc, rem_len, rem_page_len, max_dump_size;   in ipr_sdt_copy() local
    2999  cur_len = min(rem_len, rem_page_len);   in ipr_sdt_copy()
    3008  (cur_len / sizeof(u32)));   in ipr_sdt_copy()
    3013  ioa_dump->page_offset += cur_len;   in ipr_sdt_copy()
    3014  bytes_copied += cur_len;   in ipr_sdt_copy()
|