/kernel/linux/linux-5.10/kernel/dma/ |
D | coherent.c |
    27  if (dev && dev->dma_mem)                                           in dev_get_coherent_memory()
    28  return dev->dma_mem;                                               in dev_get_coherent_memory()
    44  struct dma_coherent_mem *dma_mem = NULL;                           in dma_init_coherent_memory() local
    60  dma_mem = kzalloc(sizeof(struct dma_coherent_mem), GFP_KERNEL);    in dma_init_coherent_memory()
    61  if (!dma_mem) {                                                    in dma_init_coherent_memory()
    65  dma_mem->bitmap = kzalloc(bitmap_size, GFP_KERNEL);                in dma_init_coherent_memory()
    66  if (!dma_mem->bitmap) {                                            in dma_init_coherent_memory()
    71  dma_mem->virt_base = mem_base;                                     in dma_init_coherent_memory()
    72  dma_mem->device_base = device_addr;                                in dma_init_coherent_memory()
    73  dma_mem->pfn_base = PFN_DOWN(phys_addr);                           in dma_init_coherent_memory()
    [all …]
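The hits above are the pool bookkeeping behind per-device coherent memory (dev->dma_mem). As a rough illustration only, not code from this tree, a driver would normally reach this path by declaring a device-local pool and then allocating from it through the ordinary coherent API; the probe function name, physical address, and sizes below are made-up placeholders:

    #include <linux/dma-mapping.h>
    #include <linux/device.h>
    #include <linux/sizes.h>

    /* Illustrative sketch: example_probe() and the addresses are placeholders. */
    static int example_probe(struct device *dev)
    {
            dma_addr_t dma_handle;
            void *vaddr;
            int ret;

            /*
             * Declare a device-local window as the coherent pool; this public
             * entry point ends up in dma_init_coherent_memory() and hangs the
             * resulting pool off dev->dma_mem.
             */
            ret = dma_declare_coherent_memory(dev, 0x80000000 /* phys */,
                                              0x80000000 /* device addr */,
                                              SZ_1M);
            if (ret)
                    return ret;

            /* Coherent allocations for this device are now served from the pool. */
            vaddr = dma_alloc_coherent(dev, SZ_4K, &dma_handle, GFP_KERNEL);
            if (!vaddr)
                    return -ENOMEM;

            dma_free_coherent(dev, SZ_4K, vaddr, dma_handle);
            return 0;
    }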
|
/kernel/linux/linux-5.10/drivers/scsi/be2iscsi/ |
D | be.h |
    31  struct be_dma_mem dma_mem;                            member
    53  return q->dma_mem.va + q->head * q->entry_size;       in queue_head_node()
    58  return q->dma_mem.va + wrb_num * q->entry_size;       in queue_get_wrb()
    63  return q->dma_mem.va + q->tail * q->entry_size;       in queue_tail_node()
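queue_head_node(), queue_get_wrb() and queue_tail_node() above all compute an entry address inside one DMA-coherent ring as base + index * entry_size. A simplified sketch of that indexing, with placeholder types rather than the driver's real structures:

    #include <linux/types.h>

    /* Simplified stand-in for the driver's queue object, not the real layout. */
    struct example_queue {
            void *va;          /* dma_mem.va: CPU view of the ring buffer */
            u16 head;          /* next entry the driver will fill         */
            u16 tail;          /* next entry to consume                   */
            u16 len;           /* number of entries in the ring           */
            u16 entry_size;    /* bytes per entry                         */
    };

    /* Entry currently owned by the producer. */
    static inline void *example_head_node(struct example_queue *q)
    {
            return q->va + q->head * q->entry_size;
    }

    /* Entry currently owned by the consumer; callers wrap head/tail with len. */
    static inline void *example_tail_node(struct example_queue *q)
    {
            return q->va + q->tail * q->entry_size;
    }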
|
D | be_cmds.c |
    741  struct be_dma_mem *q_mem = &eq->dma_mem;      in beiscsi_cmd_eq_create()
    783  struct be_dma_mem *q_mem = &cq->dma_mem;      in beiscsi_cmd_cq_create()
    856  struct be_dma_mem *q_mem = &mccq->dma_mem;    in beiscsi_cmd_mccq_create()
    982  struct be_dma_mem *q_mem = &dq->dma_mem;      in be_cmd_create_default_pdu_queue()
|
D | be_main.c |
    2981  struct be_dma_mem *mem = &q->dma_mem;    in be_fill_queue()
    3013  mem = &eq->dma_mem;                      in beiscsi_create_eqs()
    3051  mem = &eq->dma_mem;                      in beiscsi_create_eqs()
    3080  mem = &cq->dma_mem;                      in beiscsi_create_cqs()
    3116  mem = &cq->dma_mem;                      in beiscsi_create_cqs()
    3141  mem = &dq->dma_mem;                      in beiscsi_create_def_hdr()
    3193  mem = &dataq->dma_mem;                   in beiscsi_create_def_data()
    3308  struct be_dma_mem *mem = &q->dma_mem;    in be_queue_free()
    3319  struct be_dma_mem *mem = &q->dma_mem;    in be_queue_alloc()
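be_fill_queue(), be_queue_alloc() and be_queue_free() pair the ring bookkeeping with a single coherent allocation per queue. A hedged sketch of the allocate/free pattern these hits point at, using simplified names in place of struct be_dma_mem and struct be_queue_info:

    #include <linux/dma-mapping.h>
    #include <linux/types.h>

    struct example_dma_mem {
            void *va;          /* CPU address of the ring          */
            dma_addr_t dma;    /* bus address handed to the adapter */
            u32 size;          /* total ring size in bytes          */
    };

    static int example_queue_alloc(struct device *dev, struct example_dma_mem *mem,
                                   u16 len, u16 entry_size)
    {
            mem->size = (u32)len * entry_size;
            mem->va = dma_alloc_coherent(dev, mem->size, &mem->dma, GFP_KERNEL);
            if (!mem->va)
                    return -ENOMEM;
            return 0;
    }

    static void example_queue_free(struct device *dev, struct example_dma_mem *mem)
    {
            if (mem->va)
                    dma_free_coherent(dev, mem->size, mem->va, mem->dma);
            mem->va = NULL;
    }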
|
/kernel/linux/linux-5.10/drivers/net/ethernet/qlogic/qed/ |
D | qed_cxt.c |
    776  if (!p_t2 || !p_t2->dma_mem)             in qed_cxt_src_t2_free()
    780  if (p_t2->dma_mem[i].virt_addr)          in qed_cxt_src_t2_free()
    782  p_t2->dma_mem[i].size,                   in qed_cxt_src_t2_free()
    783  p_t2->dma_mem[i].virt_addr,              in qed_cxt_src_t2_free()
    784  p_t2->dma_mem[i].phys_addr);             in qed_cxt_src_t2_free()
    786  kfree(p_t2->dma_mem);                    in qed_cxt_src_t2_free()
    787  p_t2->dma_mem = NULL;                    in qed_cxt_src_t2_free()
    797  if (!p_t2 || !p_t2->dma_mem)             in qed_cxt_t2_alloc_pages()
    802  p_virt = &p_t2->dma_mem[i].virt_addr;    in qed_cxt_t2_alloc_pages()
    806  &p_t2->dma_mem[i].phys_addr,             in qed_cxt_t2_alloc_pages()
    [all …]
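qed_cxt_src_t2_free() and qed_cxt_t2_alloc_pages() manage an array of DMA chunks: each element carries its own virt_addr/phys_addr/size, and teardown frees every allocated chunk before releasing the array itself. A hedged sketch of that teardown shape (the descriptor fields mirror the hits above; everything else is simplified):

    #include <linux/dma-mapping.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    struct example_mem_desc {
            void *virt_addr;
            dma_addr_t phys_addr;
            size_t size;
    };

    /* Free each chunk that was actually allocated, then the descriptor array. */
    static void example_t2_free(struct device *dev,
                                struct example_mem_desc **descs, u32 num_pages)
    {
            u32 i;

            if (!descs || !*descs)
                    return;

            for (i = 0; i < num_pages; i++) {
                    if (!(*descs)[i].virt_addr)
                            continue;
                    dma_free_coherent(dev, (*descs)[i].size,
                                      (*descs)[i].virt_addr,
                                      (*descs)[i].phys_addr);
            }

            kfree(*descs);
            *descs = NULL;
    }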
|
D | qed_cxt.h | 291 struct phys_mem_desc *dma_mem; member
|
/kernel/linux/linux-5.10/drivers/scsi/ |
D | stex.c |
    311   void *dma_mem;                                                            member
    411   struct req_msg *req = hba->dma_mem + hba->req_head * hba->rq_size;       in stex_alloc_req()
    421   return (struct req_msg *)(hba->dma_mem +                                 in stex_ss_alloc_req()
    1147  msg_h = (struct st_msg_header *)hba->dma_mem;                            in stex_ss_handshake()
    1737  hba->dma_mem = dma_alloc_coherent(&pdev->dev,                            in stex_probe()
    1739  if (!hba->dma_mem) {                                                     in stex_probe()
    1748  hba->dma_mem = dma_alloc_coherent(&pdev->dev,                            in stex_probe()
    1752  if (!hba->dma_mem) {                                                     in stex_probe()
    1769  hba->scratch = (__le32 *)(hba->dma_mem + scratch_offset);                in stex_probe()
    1770  hba->status_buffer = (struct status_msg *)(hba->dma_mem + sts_offset);   in stex_probe()
    [all …]
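stex_probe() makes one coherent allocation for the whole host and derives the request ring, scratch area and status buffer from offsets inside it, as the stex_alloc_req()/stex_ss_alloc_req() hits show. A hedged sketch of that carve-up; the region sizes and offsets below are placeholders, not the controller's real layout:

    #include <linux/dma-mapping.h>
    #include <linux/types.h>

    /* Placeholder region sizes, for illustration only. */
    #define EX_REQ_AREA_SZ   (16 * 1024)
    #define EX_STS_AREA_SZ   (4 * 1024)
    #define EX_SCRATCH_SZ    (1 * 1024)
    #define EX_TOTAL_SZ      (EX_REQ_AREA_SZ + EX_STS_AREA_SZ + EX_SCRATCH_SZ)

    struct example_hba {
            void *dma_mem;            /* base of the single coherent block */
            dma_addr_t dma_handle;
            void *status_buffer;
            __le32 *scratch;
    };

    static int example_alloc_dma(struct device *dev, struct example_hba *hba)
    {
            hba->dma_mem = dma_alloc_coherent(dev, EX_TOTAL_SZ,
                                              &hba->dma_handle, GFP_KERNEL);
            if (!hba->dma_mem)
                    return -ENOMEM;

            /* Sub-regions are plain offsets into the one coherent block. */
            hba->status_buffer = hba->dma_mem + EX_REQ_AREA_SZ;
            hba->scratch = (__le32 *)(hba->dma_mem +
                                      EX_REQ_AREA_SZ + EX_STS_AREA_SZ);
            return 0;
    }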
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/i40iw/ |
D | i40iw_verbs.h | 128 struct i40iw_dma_mem dma_mem; member
|
D | i40iw_verbs.c |
    357  i40iw_free_dma_mem(iwdev->sc_dev.hw, &iwqp->kqp.dma_mem);    in i40iw_free_qp_resources()
    448  struct i40iw_dma_mem *mem = &iwqp->kqp.dma_mem;              in i40iw_setup_kmode_qp()
|
/kernel/linux/linux-5.10/drivers/staging/fsl-dpaa2/ethsw/ |
D | ethsw.c |
    668  u8 *dma_mem;                                                    in dpaa2_switch_port_fdb_dump() local
    671  dma_mem = kzalloc(fdb_dump_size, GFP_KERNEL);                   in dpaa2_switch_port_fdb_dump()
    672  if (!dma_mem)                                                   in dpaa2_switch_port_fdb_dump()
    675  fdb_dump_iova = dma_map_single(dev, dma_mem, fdb_dump_size,    in dpaa2_switch_port_fdb_dump()
    692  fdb_entries = (struct fdb_dump_entry *)dma_mem;                 in dpaa2_switch_port_fdb_dump()
    707  kfree(dma_mem);                                                 in dpaa2_switch_port_fdb_dump()
    714  kfree(dma_mem);                                                 in dpaa2_switch_port_fdb_dump()
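dpaa2_switch_port_fdb_dump() uses the streaming DMA API rather than a coherent buffer: the dump area comes from kzalloc(), is mapped with dma_map_single() for the duration of the firmware command, then unmapped and freed. A hedged sketch of that sequence; the firmware call itself is elided:

    #include <linux/dma-mapping.h>
    #include <linux/slab.h>

    static int example_fdb_dump(struct device *dev, size_t dump_size)
    {
            dma_addr_t iova;
            u8 *buf;
            int err = 0;

            buf = kzalloc(dump_size, GFP_KERNEL);
            if (!buf)
                    return -ENOMEM;

            /* The hardware writes the dump, so map for device-to-CPU traffic. */
            iova = dma_map_single(dev, buf, dump_size, DMA_FROM_DEVICE);
            if (dma_mapping_error(dev, iova)) {
                    err = -ENOMEM;
                    goto out_free;
            }

            /* ... hand 'iova' to the management firmware command here ... */

            dma_unmap_single(dev, iova, dump_size, DMA_FROM_DEVICE);
            /* The FDB entries can now be parsed out of 'buf'. */

    out_free:
            kfree(buf);
            return err;
    }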
|
/kernel/linux/linux-5.10/drivers/net/ethernet/emulex/benet/ |
D | be.h |
    131  struct be_dma_mem dma_mem;                            member
    153  return q->dma_mem.va + q->head * q->entry_size;       in queue_head_node()
    158  return q->dma_mem.va + q->tail * q->entry_size;       in queue_tail_node()
    163  return q->dma_mem.va + index * q->entry_size;         in queue_index_node()
|
D | be_cmds.c |
    998   struct be_dma_mem *q_mem = &eqo->q.dma_mem;    in be_cmd_eq_create()
    1163  struct be_dma_mem *q_mem = &cq->dma_mem;       in be_cmd_cq_create()
    1241  struct be_dma_mem *q_mem = &mccq->dma_mem;     in be_cmd_mccq_ext_create()
    1306  struct be_dma_mem *q_mem = &mccq->dma_mem;     in be_cmd_mccq_org_create()
    1365  struct be_dma_mem *q_mem = &txq->dma_mem;      in be_cmd_txq_create()
    1413  struct be_dma_mem *q_mem = &rxq->dma_mem;      in be_cmd_rxq_create()
|
D | be_main.c |
    147   struct be_dma_mem *mem = &q->dma_mem;    in be_queue_free()
    159   struct be_dma_mem *mem = &q->dma_mem;    in be_queue_alloc()
    1436  entry = txo->q.dma_mem.va;               in be_tx_timeout()
    1446  entry = txo->cq.dma_mem.va;              in be_tx_timeout()
|
/kernel/linux/linux-5.10/drivers/net/ethernet/freescale/dpaa2/ |
D | dpaa2-eth.c |
    3112  void *dma_mem, *key, *mask;                                   in dpaa2_eth_set_vlan_qos() local
    3126  dma_mem = kzalloc(DPAA2_CLASSIFIER_DMA_SIZE, GFP_KERNEL);     in dpaa2_eth_set_vlan_qos()
    3127  if (!dma_mem)                                                 in dpaa2_eth_set_vlan_qos()
    3136  err = dpni_prepare_key_cfg(&kg_cfg, dma_mem);                 in dpaa2_eth_set_vlan_qos()
    3145  qos_cfg.key_cfg_iova = dma_map_single(dev, dma_mem,           in dpaa2_eth_set_vlan_qos()
    3212  kfree(dma_mem);                                               in dpaa2_eth_set_vlan_qos()
    3658  u8 *dma_mem;                                                  in dpaa2_eth_set_dist_key() local
    3688  dma_mem = kzalloc(DPAA2_CLASSIFIER_DMA_SIZE, GFP_KERNEL);     in dpaa2_eth_set_dist_key()
    3689  if (!dma_mem)                                                 in dpaa2_eth_set_dist_key()
    3692  err = dpni_prepare_key_cfg(&cls_cfg, dma_mem);                in dpaa2_eth_set_dist_key()
    [all …]
|
/kernel/linux/linux-5.10/include/linux/ |
D | device.h | 520 struct dma_coherent_mem *dma_mem; /* internal for coherent mem member
|