/drivers/parisc/ |
D | iommu-helpers.h |
    22  unsigned long dma_offset = 0, dma_len = 0;   in iommu_fill_pdir() local
    47  BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg)));   in iommu_fill_pdir()
    51  dma_len = sg_dma_len(startsg);   in iommu_fill_pdir()
   106  unsigned long dma_offset, dma_len; /* start/len of DMA stream */   in iommu_coalesce_chunks() local
   120  dma_len = startsg->length;   in iommu_coalesce_chunks()
   149  if (unlikely(ALIGN(dma_len + dma_offset + startsg->length, IOVP_SIZE) >   in iommu_coalesce_chunks()
   163  dma_len += startsg->length;   in iommu_coalesce_chunks()
   171  sg_dma_len(contig_sg) = dma_len;   in iommu_coalesce_chunks()
   172  dma_len = ALIGN(dma_len + dma_offset, IOVP_SIZE);   in iommu_coalesce_chunks()
   175  | (iommu_alloc_range(ioc, dev, dma_len) << IOVP_SHIFT)   in iommu_coalesce_chunks()
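The test on line 149 is the core of the coalescing logic: a scatterlist segment joins the current DMA stream only while the stream, rounded up to the I/O page size, stays within the mapper's budget. Below is a minimal userspace sketch of that shape; IOVP_SIZE and max_stream are illustrative assumptions, not the parisc driver's actual limits.

/* Sketch: merge segment lengths into one stream while the page-aligned
 * total stays under an assumed per-stream budget. */
#include <stdio.h>

#define IOVP_SIZE   4096UL                      /* assumed I/O page size */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	unsigned long seg_len[] = { 1500, 1500, 1500, 6000, 256 };
	unsigned long dma_offset = 128;             /* first byte's offset in its page */
	unsigned long max_stream = 2 * IOVP_SIZE;   /* assumed per-stream budget */
	unsigned long dma_len = 0;

	for (int i = 0; i < 5; i++) {
		/* Same shape as the line-149 test: would adding this
		 * segment push the page-aligned stream past the budget? */
		if (ALIGN(dma_len + dma_offset + seg_len[i], IOVP_SIZE) > max_stream) {
			printf("stream ends at %lu bytes, segment %d starts a new one\n",
			       dma_len, i);
			dma_len = 0;
			dma_offset = 0;
		}
		dma_len += seg_len[i];
	}
	printf("final stream: %lu bytes\n", dma_len);
	return 0;
}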
|
/drivers/net/ethernet/netronome/nfp/nfdk/ |
D | dp.c |
   241  unsigned int dma_len, type;   in nfp_nfdk_tx() local
   282  dma_len = skb_headlen(skb);   in nfp_nfdk_tx()
   285  else if (!nr_frags && dma_len <= NFDK_TX_MAX_DATA_PER_HEAD)   in nfp_nfdk_tx()
   290  dma_addr = dma_map_single(dp->dev, skb->data, dma_len, DMA_TO_DEVICE);   in nfp_nfdk_tx()
   301  dma_len -= 1;   in nfp_nfdk_tx()
   310  dma_len > NFDK_DESC_TX_DMA_LEN_HEAD ?   in nfp_nfdk_tx()
   311  NFDK_DESC_TX_DMA_LEN_HEAD : dma_len) |   in nfp_nfdk_tx()
   324  dma_len -= tmp_dlen;   in nfp_nfdk_tx()
   335  while (dma_len > 0) {   in nfp_nfdk_tx()
   336  dma_len -= 1;   in nfp_nfdk_tx()
  [all …]
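The repeated `dma_len -= 1` at lines 301 and 336 suggests this hardware's descriptor length field is biased by one (the field holds length minus one). The sketch below plays out that encoding under assumptions: MAX_LEN_FIELD is a stand-in for NFDK_DESC_TX_DMA_LEN_HEAD, not the NFP's real field width, and it assumes at least one byte to send.

/* Sketch: carve a buffer into descriptors whose length field stores
 * (chunk length - 1), clamped to an assumed per-descriptor maximum. */
#include <stdio.h>

#define MAX_LEN_FIELD 0x0fff   /* assumed: field covers at most 4096 bytes */

int main(void)
{
	unsigned int dma_len = 10000;   /* bytes left to describe; must be >= 1 */

	dma_len -= 1;                   /* field is length minus one */
	while (1) {
		unsigned int field = dma_len > MAX_LEN_FIELD ? MAX_LEN_FIELD : dma_len;

		printf("descriptor: field=0x%03x -> %u bytes\n", field, field + 1);
		dma_len -= field;       /* consume what this descriptor covered */
		if (dma_len == 0)
			break;
		dma_len -= 1;           /* next descriptor's minus-one bias */
	}
	return 0;
}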
|
/drivers/net/ethernet/mellanox/mlx5/core/en/ |
D | xdp.h |
   147  u32 dma_len = xdptxd->len;   in mlx5e_xdp_mpwqe_add_dseg() local
   150  session->bytes_count += dma_len;   in mlx5e_xdp_mpwqe_add_dseg()
   152  if (session->inline_on && dma_len <= MLX5E_XDP_INLINE_WQE_SZ_THRSD) {   in mlx5e_xdp_mpwqe_add_dseg()
   155  u16 ds_len = sizeof(*inline_dseg) + dma_len;   in mlx5e_xdp_mpwqe_add_dseg()
   158  inline_dseg->byte_count = cpu_to_be32(dma_len | MLX5_INLINE_SEG);   in mlx5e_xdp_mpwqe_add_dseg()
   159  memcpy(inline_dseg->data, xdptxd->data, dma_len);   in mlx5e_xdp_mpwqe_add_dseg()
   167  dseg->byte_count = cpu_to_be32(dma_len);   in mlx5e_xdp_mpwqe_add_dseg()
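The branch at line 152 is an inline-versus-pointer decision: payloads at or under a threshold are copied straight into the work queue entry and flagged in the byte count, while larger ones get an address/length descriptor for the NIC to fetch by DMA. A minimal sketch of that choice follows; the threshold, flag bit, and struct layout are illustrative assumptions, not mlx5's real values.

/* Sketch: inline short payloads into the descriptor, post a pointer
 * for long ones.  htonl() stands in for cpu_to_be32(). */
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <arpa/inet.h>

#define INLINE_THRESHOLD 256          /* assumed MLX5E_XDP_INLINE_WQE_SZ_THRSD */
#define INLINE_SEG_FLAG  0x80000000u  /* assumed MLX5_INLINE_SEG bit */

struct dseg {
	uint32_t byte_count;                     /* big-endian length, maybe flagged */
	union {
		uint64_t addr;                   /* DMA address (pointer form) */
		uint8_t  data[INLINE_THRESHOLD]; /* payload (inline form) */
	};
};

static void add_dseg(struct dseg *dseg, const void *buf, uint32_t dma_len,
		     uint64_t dma_addr, int inline_on)
{
	if (inline_on && dma_len <= INLINE_THRESHOLD) {
		dseg->byte_count = htonl(dma_len | INLINE_SEG_FLAG);
		memcpy(dseg->data, buf, dma_len);   /* payload travels in the WQE */
	} else {
		dseg->byte_count = htonl(dma_len);
		dseg->addr = dma_addr;              /* NIC fetches it by DMA */
	}
}

int main(void)
{
	struct dseg d;
	char pkt[64] = "tiny packet";

	add_dseg(&d, pkt, sizeof(pkt), 0xdeadbeef000ull, 1);
	printf("byte_count=0x%08x (inline)\n", ntohl(d.byte_count));
	return 0;
}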
|
D | xdp.c |
   371  u32 dma_len = xdptxd->len;   in mlx5e_xmit_xdp_frame() local
   379  if (unlikely(dma_len < MLX5E_XDP_MIN_INLINE || sq->hw_mtu < dma_len)) {   in mlx5e_xmit_xdp_frame()
   422  dma_len -= MLX5E_XDP_MIN_INLINE;   in mlx5e_xmit_xdp_frame()
   430  dseg->byte_count = cpu_to_be32(dma_len);   in mlx5e_xmit_xdp_frame()
|
/drivers/net/wireless/broadcom/brcm80211/brcmsmac/ |
D | brcms_trace_brcmsmac_tx.h |
    76  u16 dma_len),
    78  dma_len),
    85  __field(u16, dma_len)
    93  __entry->dma_len = dma_len;
    98  __entry->ampdu_frames, __entry->dma_len)
|
D | ampdu.c |
   509  session->dma_len = 0;   in brcms_c_ampdu_reset_session()
   568  session->dma_len += p->len;   in brcms_c_ampdu_add_frame()
   644  u16 dma_len = session->dma_len;   in brcms_c_ampdu_finalize() local
   768  dma_len = min(dma_len, f->ampdu_pld_size);   in brcms_c_ampdu_finalize()
   769  txh->PreloadSize = cpu_to_le16(dma_len);   in brcms_c_ampdu_finalize()
|
/drivers/i2c/busses/ |
D | i2c-stm32.c |
    88  dma->dma_len = 0;   in stm32_i2c_dma_free()
   118  dma->dma_len = len;   in stm32_i2c_prep_dma_xfer()
   121  dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,   in stm32_i2c_prep_dma_xfer()
   129  dma->dma_len,   in stm32_i2c_prep_dma_xfer()
   153  dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,   in stm32_i2c_prep_dma_xfer()
|
/drivers/net/ethernet/sfc/ |
D | tx_tso.c |
   107  unsigned int dma_len;   in efx_tx_queue_insert() local
   121  dma_len = tx_queue->efx->type->tx_limit_len(tx_queue,   in efx_tx_queue_insert()
   125  if (dma_len >= len)   in efx_tx_queue_insert()
   128  buffer->len = dma_len;   in efx_tx_queue_insert()
   130  dma_addr += dma_len;   in efx_tx_queue_insert()
   131  len -= dma_len;   in efx_tx_queue_insert()
|
D | tx_common.c |
   311  unsigned int dma_len;   in efx_tx_map_chunk() local
   318  dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);   in efx_tx_map_chunk()
   320  dma_len = len;   in efx_tx_map_chunk()
   322  buffer->len = dma_len;   in efx_tx_map_chunk()
   325  len -= dma_len;   in efx_tx_map_chunk()
   326  dma_addr += dma_len;   in efx_tx_map_chunk()
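This is the map-chunk pattern shared by the sfc driver and its siena and falcon copies below: a DMA-mapped region is carved into descriptors, each no longer than what the NIC accepts, advancing the address and shrinking the remaining length in lockstep. A standalone sketch follows; limit_len() is a stand-in for the per-NIC tx_limit_len() hook, with an assumed 16 KiB boundary rule.

/* Sketch: split a mapped region into descriptor-sized chunks that
 * never cross an assumed 16 KiB boundary. */
#include <stdint.h>
#include <stdio.h>

static unsigned int limit_len(uint64_t dma_addr, unsigned int len)
{
	unsigned int to_boundary = 0x4000 - (dma_addr & 0x3fff);

	return len < to_boundary ? len : to_boundary;
}

int main(void)
{
	uint64_t dma_addr = 0x10003f00;   /* deliberately near a boundary */
	unsigned int len = 60000;

	while (len) {
		unsigned int dma_len = limit_len(dma_addr, len);

		printf("descriptor: addr=0x%08llx len=%u\n",
		       (unsigned long long)dma_addr, dma_len);
		len -= dma_len;
		dma_addr += dma_len;
	}
	return 0;
}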
|
/drivers/net/ethernet/sfc/siena/ |
D | tx_common.c |
   292  unsigned int dma_len;   in efx_siena_tx_map_chunk() local
   299  dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);   in efx_siena_tx_map_chunk()
   301  dma_len = len;   in efx_siena_tx_map_chunk()
   303  buffer->len = dma_len;   in efx_siena_tx_map_chunk()
   306  len -= dma_len;   in efx_siena_tx_map_chunk()
   307  dma_addr += dma_len;   in efx_siena_tx_map_chunk()
|
/drivers/scsi/ |
D | am53c974.c |
   243  static u32 pci_esp_dma_length_limit(struct esp *esp, u32 dma_addr, u32 dma_len)   in pci_esp_dma_length_limit() argument
   255  if (dma_len > (1U << dma_limit))   in pci_esp_dma_length_limit()
   256  dma_len = (1U << dma_limit);   in pci_esp_dma_length_limit()
   262  end = base + dma_len;   in pci_esp_dma_length_limit()
   265  dma_len = end - base;   in pci_esp_dma_length_limit()
   267  return dma_len;   in pci_esp_dma_length_limit()
|
D | zorro_esp.c |
   219  u32 dma_len)   in zorro_esp_dma_length_limit() argument
   221  return dma_len > (1U << 16) ? (1U << 16) : dma_len;   in zorro_esp_dma_length_limit()
   225  u32 dma_len)   in fastlane_esp_dma_length_limit() argument
   228  return dma_len > 0xfffc ? 0xfffc : dma_len;   in fastlane_esp_dma_length_limit()
|
D | mac53c94.c |
   364  u32 dma_len;   in set_dma_cmds() local
   378  dma_len = sg_dma_len(scl);   in set_dma_cmds()
   379  if (dma_len > 0xffff)   in set_dma_cmds()
   381  total += dma_len;   in set_dma_cmds()
   382  dcmds->req_count = cpu_to_le16(dma_len);   in set_dma_cmds()
|
D | esp_scsi.c |
   522  static u32 esp_dma_length_limit(struct esp *esp, u32 dma_addr, u32 dma_len)   in esp_dma_length_limit() argument
   526  if (dma_len > (1U << 24))   in esp_dma_length_limit()
   527  dma_len = (1U << 24);   in esp_dma_length_limit()
   537  if (dma_len > (1U << 16))   in esp_dma_length_limit()
   538  dma_len = (1U << 16);   in esp_dma_length_limit()
   544  end = base + dma_len;   in esp_dma_length_limit()
   547  dma_len = end - base;   in esp_dma_length_limit()
   549  return dma_len;   in esp_dma_length_limit()
  1742  unsigned int dma_len = esp_cur_dma_len(ent, cmd);   in esp_process_event() local
  1753  dma_len = esp->ops->dma_length_limit(esp, dma_addr,   in esp_process_event()
  [all …]
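The esp length-limit hook, here and in am53c974.c above, does two things: clamp the request to the chip's transfer-counter width, then trim it so a single transfer never crosses the counter's addressing boundary. A standalone sketch of that shape, assuming the 24-bit clamp visible above also defines the boundary window:

/* Sketch: clamp a DMA request to a 24-bit counter, then truncate it at
 * the 16 MiB window edge so one transfer never crosses it. */
#include <stdint.h>
#include <stdio.h>

static uint32_t dma_length_limit(uint32_t dma_addr, uint32_t dma_len)
{
	uint32_t base, end;

	if (dma_len > (1U << 24))            /* counter holds at most 2^24 */
		dma_len = (1U << 24);

	base = dma_addr & ((1U << 24) - 1);  /* offset within the 16 MiB window */
	end = base + dma_len;
	if (end > (1U << 24))                /* would cross the window edge */
		end = (1U << 24);
	dma_len = end - base;                /* keep only the in-window part */

	return dma_len;
}

int main(void)
{
	/* A 64 KiB request starting 4 KiB below a 16 MiB boundary: only
	 * the first 4 KiB may go in one transfer. */
	printf("%u\n", dma_length_limit(0x00fff000, 0x10000));
	return 0;
}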
|
D | mac_esp.c |
   247  static u32 mac_esp_dma_length_limit(struct esp *esp, u32 dma_addr, u32 dma_len)   in mac_esp_dma_length_limit() argument
   249  return dma_len > 0xFFFF ? 0xFFFF : dma_len;   in mac_esp_dma_length_limit()
|
/drivers/net/wireless/mediatek/mt7601u/ |
D | dma.c |
   121  u16 dma_len = get_unaligned_le16(data);   in mt7601u_rx_next_seg_len() local
   124  WARN_ON_ONCE(!dma_len) ||   in mt7601u_rx_next_seg_len()
   125  WARN_ON_ONCE(dma_len + MT_DMA_HDRS > data_len) ||   in mt7601u_rx_next_seg_len()
   126  WARN_ON_ONCE(dma_len & 0x3) ||   in mt7601u_rx_next_seg_len()
   127  WARN_ON_ONCE(dma_len < min_seg_len)) in mt7601u_rx_next_seg_len()
   130  return MT_DMA_HDRS + dma_len;   in mt7601u_rx_next_seg_len()
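Here the device-supplied segment length is read from the DMA header and validated before it is trusted: zero, out-of-bounds, unaligned, and undersized values are all rejected. A sketch of that check, with a 4-byte header standing in for MT_DMA_HDRS:

/* Sketch: parse a little-endian 16-bit segment length and sanity-check
 * it against the received buffer before using it. */
#include <stdint.h>
#include <stdio.h>

#define DMA_HDRS 4   /* assumed header bytes preceding the payload */

static unsigned int rx_next_seg_len(const uint8_t *data, uint32_t data_len,
				    uint16_t min_seg_len)
{
	/* get_unaligned_le16(): assemble the length byte by byte */
	uint16_t dma_len = (uint16_t)data[0] | ((uint16_t)data[1] << 8);

	if (!dma_len ||                        /* empty segment */
	    dma_len + DMA_HDRS > data_len ||   /* runs past the buffer */
	    (dma_len & 0x3) ||                 /* not 4-byte aligned */
	    dma_len < min_seg_len)             /* too short to be a frame */
		return 0;                      /* caller drops the buffer */

	return DMA_HDRS + dma_len;
}

int main(void)
{
	uint8_t buf[64] = { 0x28, 0x00 };      /* header claims 40 bytes */

	printf("segment length: %u\n", rx_next_seg_len(buf, sizeof(buf), 32));
	return 0;
}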
|
/drivers/net/ethernet/chelsio/cxgb/ |
D | sge.c |
   156  DEFINE_DMA_UNMAP_LEN(dma_len);
   162  DEFINE_DMA_UNMAP_LEN(dma_len);
   507  dma_unmap_len(ce, dma_len), DMA_FROM_DEVICE);   in free_freelQ_buffers()
   621  if (likely(dma_unmap_len(ce, dma_len))) {   in free_cmdQ_buffers()
   624  dma_unmap_len(ce, dma_len),   in free_cmdQ_buffers()
   826  unsigned int dma_len = q->rx_buffer_size - q->dma_offset;   in refill_free_list() local
   837  mapping = dma_map_single(&pdev->dev, skb->data, dma_len,   in refill_free_list()
   843  dma_unmap_len_set(ce, dma_len, dma_len);   in refill_free_list()
   846  e->len_gen = V_CMD_LEN(dma_len) | V_CMD_GEN1(q->genbit);   in refill_free_list()
  1055  dma_unmap_len(ce, dma_len),   in get_packet()
  [all …]
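The DEFINE_DMA_UNMAP_LEN / dma_unmap_len_set / dma_unmap_len trio shows the standard unmap bookkeeping pattern: the length passed to the map call is stashed per buffer so the unmap path, possibly much later, can replay it exactly. The sketch below uses simplified userspace stand-ins for those macros (the kernel's versions in <linux/dma-mapping.h> compile to nothing on platforms that never need the stored value), and struct centry loosely mirrors the driver's per-entry state.

/* Sketch: record the mapped length at map time, read it back at unmap
 * time, via simplified stand-in macros. */
#include <stdint.h>
#include <stdio.h>

#define DEFINE_DMA_UNMAP_LEN(name)      uint32_t name
#define dma_unmap_len_set(p, name, v)   ((p)->name = (v))
#define dma_unmap_len(p, name)          ((p)->name)

struct centry {                 /* per-buffer software state */
	DEFINE_DMA_UNMAP_LEN(dma_len);
};

int main(void)
{
	struct centry ce;
	unsigned int dma_len = 2048 - 64;   /* rx_buffer_size - dma_offset */

	/* map path: remember how much was mapped */
	dma_unmap_len_set(&ce, dma_len, dma_len);

	/* unmap path: replay exactly the same length */
	printf("unmapping %u bytes\n", dma_unmap_len(&ce, dma_len));
	return 0;
}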
|
/drivers/nvme/host/ |
D | apple.c |
   165  unsigned int dma_len; /* length of single DMA segment mapping */   member
   362  if (iod->dma_len) {   in apple_nvme_unmap_data()
   363  dma_unmap_page(anv->dev, iod->first_dma, iod->dma_len,   in apple_nvme_unmap_data()
   401  int dma_len = sg_dma_len(sg);   in apple_nvme_setup_prps()
   415  dma_len -= (NVME_CTRL_PAGE_SIZE - offset);   in apple_nvme_setup_prps()
   416  if (dma_len) {   in apple_nvme_setup_prps()
   421  dma_len = sg_dma_len(sg);   in apple_nvme_setup_prps()
   460  dma_len -= NVME_CTRL_PAGE_SIZE;   in apple_nvme_setup_prps()
   465  if (dma_len > 0)   in apple_nvme_setup_prps()
   467  if (unlikely(dma_len < 0))   in apple_nvme_setup_prps()
  [all …]
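This is the NVMe PRP walk: the first PRP entry may start mid-page, every later entry must be page-sized and page-aligned, and dma_len going negative (line 467) means a segment ended mid-page, a layout PRPs cannot express. A standalone sketch of the walk, with made-up segment values and a 4 KiB page standing in for NVME_CTRL_PAGE_SIZE:

/* Sketch: emit the PRP entries for a two-segment mapping, detecting
 * the mid-page-end error case. */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SZ 4096   /* assumed NVME_CTRL_PAGE_SIZE */

int main(void)
{
	struct { uint64_t addr; int len; } sg[] = {
		{ 0x100200, 8192 - 0x200 },   /* starts 0x200 into a page */
		{ 0x200000, 8192 },           /* page-aligned follow-up */
	};
	int nsegs = 2, i = 0;
	uint64_t addr = sg[0].addr;
	int dma_len = sg[0].len;
	int offset = addr & (PAGE_SZ - 1);

	printf("prp: 0x%llx\n", (unsigned long long)addr);   /* first PRP */
	dma_len -= PAGE_SZ - offset;
	addr += PAGE_SZ - offset;

	while (1) {
		if (dma_len == 0) {           /* segment ended on a page edge */
			if (++i == nsegs)
				break;
			addr = sg[i].addr;
			dma_len = sg[i].len;
		} else if (dma_len < 0) {     /* segment ended mid-page */
			printf("error: layout not expressible as PRPs\n");
			return 1;
		}
		printf("prp: 0x%llx\n", (unsigned long long)addr);
		dma_len -= PAGE_SZ;
		addr += PAGE_SZ;
	}
	return 0;
}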
|
/drivers/gpu/drm/kmb/ |
D | kmb_plane.c |
   354  unsigned int dma_len;   in kmb_plane_atomic_update() local
   397  dma_len = (width * height * fb->format->cpp[0]);   in kmb_plane_atomic_update()
   398  drm_dbg(&kmb->drm, "dma_len=%d ", dma_len);   in kmb_plane_atomic_update()
   399  kmb_write_lcd(kmb, LCD_LAYERn_DMA_LEN(plane_id), dma_len);   in kmb_plane_atomic_update()
   400  kmb_write_lcd(kmb, LCD_LAYERn_DMA_LEN_SHADOW(plane_id), dma_len);   in kmb_plane_atomic_update()
|
/drivers/net/ethernet/sfc/falcon/ |
D | tx.c |
   187  unsigned int dma_len;   in ef4_tx_map_chunk() local
   192  dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);   in ef4_tx_map_chunk()
   194  buffer->len = dma_len;   in ef4_tx_map_chunk()
   197  len -= dma_len;   in ef4_tx_map_chunk()
   198  dma_addr += dma_len;   in ef4_tx_map_chunk()
|
/drivers/crypto/ |
D | talitos.c |
   336  return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;   in get_request_hdr()
   496  (edesc->buf + edesc->dma_len))->hdr;
  1007  if (edesc->dma_len)   in ipsec_esp_unmap()
  1008  dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,   in ipsec_esp_unmap()
  1058  oicv = edesc->buf + edesc->dma_len;   in ipsec_esp_decrypt_swauth_done()
  1215  dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;   in ipsec_esp()
  1305  edesc->dma_len,   in ipsec_esp()
  1332  int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;   in talitos_edesc_alloc() local
  1380  dma_len = (src_nents ? src_len : 0) +   in talitos_edesc_alloc()
  1383  dma_len = (src_nents + dst_nents + 2) *   in talitos_edesc_alloc()
  [all …]
|
/drivers/ata/ |
D | pata_arasan_cf.c |
   434  u32 xfer_cnt, sglen, dma_len, xfer_ctr;   in sg_xfer() local
   474  dma_len = min(xfer_cnt, FIFO_SIZE);   in sg_xfer()
   475  ret = dma_xfer(acdev, src, dest, dma_len);   in sg_xfer()
   482  src += dma_len;   in sg_xfer()
   484  dest += dma_len;   in sg_xfer()
   486  sglen -= dma_len;   in sg_xfer()
   487  xfer_cnt -= dma_len;   in sg_xfer()
|
/drivers/crypto/marvell/octeontx2/ |
D | otx2_cpt_reqmgr.h |
   145  u32 dma_len;   member
   168  info->dma_len, DMA_BIDIRECTIONAL);   in otx2_cpt_info_destroy()
|
/drivers/net/ethernet/broadcom/bnxt/ |
D | bnxt_coredump.c |
    29  dma_buf = hwrm_req_dma_slice(bp, msg, info->dma_len, &dma_handle);   in bnxt_hwrm_dbg_dma_data()
    42  cmn_req->host_buf_len = cpu_to_le32(info->dma_len);   in bnxt_hwrm_dbg_dma_data()
   104  info.dma_len = COREDUMP_LIST_BUF_LEN;   in bnxt_hwrm_dbg_coredump_list()
   150  info.dma_len = COREDUMP_RETRIEVE_BUF_LEN;   in bnxt_hwrm_dbg_coredump_retrieve()
|
/drivers/crypto/marvell/octeontx/ |
D | otx_cptvf_reqmgr.h |
   186  u32 dma_len;   member
   198  info->dma_len, DMA_BIDIRECTIONAL);   in do_request_cleanup()
|