Searched refs:dma_list (Results 1 – 12 of 12) sorted by relevance

/drivers/infiniband/hw/mthca/
mthca_allocator.c
200 u64 *dma_list = NULL; in mthca_buf_alloc() local
223 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL); in mthca_buf_alloc()
224 if (!dma_list) in mthca_buf_alloc()
228 dma_list[i] = t + i * (1 << shift); in mthca_buf_alloc()
234 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL); in mthca_buf_alloc()
235 if (!dma_list) in mthca_buf_alloc()
253 dma_list[i] = t; in mthca_buf_alloc()
261 dma_list, shift, npages, in mthca_buf_alloc()
269 kfree(dma_list); in mthca_buf_alloc()
277 kfree(dma_list); in mthca_buf_alloc()
mthca_eq.c
471 u64 *dma_list = NULL; in mthca_create_eq() local
490 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL); in mthca_create_eq()
491 if (!dma_list) in mthca_create_eq()
505 dma_list[i] = t; in mthca_create_eq()
519 dma_list, PAGE_SHIFT, npages, in mthca_create_eq()
551 kfree(dma_list); in mthca_create_eq()
582 kfree(dma_list); in mthca_create_eq()
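
Both mthca hits above follow one pattern, which the mlx4 eq.c hits further down repeat: kmalloc a temporary u64 array, store one bus address per page, hand the array to the device's page-table write, and kfree it on both the success and error paths. A minimal sketch of that shape; write_page_table() is a hypothetical stand-in for the firmware command the drivers actually issue, not a real API:

#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Hypothetical stand-in for the firmware command the drivers issue
 * (mthca's ICM-mapping calls, mlx4_write_mtt()); not a real API. */
int write_page_table(u64 *pages, int npages);

static int alloc_and_register(u64 t, int npages, int shift)
{
	u64 *dma_list;
	int i, err;

	/* One 64-bit bus address per page, as at mthca_allocator.c:223. */
	dma_list = kmalloc(npages * sizeof(*dma_list), GFP_KERNEL);
	if (!dma_list)
		return -ENOMEM;

	/* Contiguous buffer: page i sits at t + i * (1 << shift). */
	for (i = 0; i < npages; ++i)
		dma_list[i] = t + ((u64)i << shift);

	err = write_page_table(dma_list, npages);

	/* The array is only staging for the command; free it on all paths. */
	kfree(dma_list);
	return err;
}
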
/drivers/misc/genwqe/
card_utils.c
236 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, in genwqe_unmap_pages() argument
242 for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) { in genwqe_unmap_pages()
243 pci_unmap_page(pci_dev, dma_list[i], in genwqe_unmap_pages()
245 dma_list[i] = 0x0; in genwqe_unmap_pages()
251 dma_addr_t *dma_list) in genwqe_map_pages() argument
260 dma_list[i] = 0x0; in genwqe_map_pages()
273 dma_list[i] = daddr; in genwqe_map_pages()
278 genwqe_unmap_pages(cd, dma_list, num_pages); in genwqe_map_pages()
381 dma_addr_t *dma_list) in genwqe_setup_sgl() argument
416 daddr = dma_list[p] + map_offs; in genwqe_setup_sgl()
[all …]
card_base.h
180 dma_addr_t *dma_list; /* list of dma addresses per page */ member
376 dma_addr_t *dma_list);
card_dev.c
965 &m->dma_list[page_offs]); in ddcb_cmd_fixups()
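
The genwqe hits show the array being filled by the mapping layer itself: entries are pre-zeroed, each page is mapped with pci_map_page(), and on failure the unmap helper walks the array until it reaches the first zero sentinel. A condensed sketch under those assumptions (the helper names and the -EIO return are simplified from the driver):

#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/pci.h>

/* Unmap in order and stop at the first zero entry: 0x0 is the
 * "never mapped" sentinel, which is why map_pages() pre-zeroes. */
static void unmap_pages(struct pci_dev *pci_dev, dma_addr_t *dma_list,
			int num_pages)
{
	int i;

	for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) {
		pci_unmap_page(pci_dev, dma_list[i], PAGE_SIZE,
			       PCI_DMA_BIDIRECTIONAL);
		dma_list[i] = 0x0;
	}
}

static int map_pages(struct pci_dev *pci_dev, struct page **page_list,
		     int num_pages, dma_addr_t *dma_list)
{
	int i;

	for (i = 0; i < num_pages; i++)
		dma_list[i] = 0x0;	/* sentinel for the error path */

	for (i = 0; i < num_pages; i++) {
		dma_addr_t daddr = pci_map_page(pci_dev, page_list[i], 0,
						PAGE_SIZE,
						PCI_DMA_BIDIRECTIONAL);

		if (pci_dma_mapping_error(pci_dev, daddr)) {
			/* Undo everything mapped so far, then bail. */
			unmap_pages(pci_dev, dma_list, num_pages);
			return -EIO;
		}
		dma_list[i] = daddr;
	}
	return 0;
}
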
/drivers/infiniband/core/
umem_odp.c
280 umem->odp_data->dma_list = vzalloc(ib_umem_num_pages(umem) * in ib_umem_odp_get()
281 sizeof(*umem->odp_data->dma_list)); in ib_umem_odp_get()
282 if (!umem->odp_data->dma_list) { in ib_umem_odp_get()
340 vfree(umem->odp_data->dma_list); in ib_umem_odp_get()
411 vfree(umem->odp_data->dma_list); in ib_umem_odp_release()
458 if (!(umem->odp_data->dma_list[page_index])) { in ib_umem_odp_map_dma_single_page()
467 umem->odp_data->dma_list[page_index] = dma_addr | access_mask; in ib_umem_odp_map_dma_single_page()
471 umem->odp_data->dma_list[page_index] |= access_mask; in ib_umem_odp_map_dma_single_page()
643 dma_addr_t dma = umem->odp_data->dma_list[idx]; in ib_umem_odp_unmap_dma_pages()
667 umem->odp_data->dma_list[idx] = 0; in ib_umem_odp_unmap_dma_pages()
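
The umem_odp.c hits rely on DMA addresses being page aligned: the low bits of each dma_list entry double as per-page access flags, stored with dma_addr | access_mask on first fault, upgraded with |= on later faults, and cleared to 0 on unmap. A sketch of that encoding; the flag names here are hypothetical illustrations, not the driver's real definitions:

#include <linux/types.h>
#include <linux/vmalloc.h>

/* Hypothetical flag bits. A DMA address is page aligned, so its low
 * bits are guaranteed zero and can carry per-page access flags. */
#define ODP_READ_BIT	((dma_addr_t)1 << 0)
#define ODP_WRITE_BIT	((dma_addr_t)1 << 1)
#define ODP_FLAGS_MASK	(ODP_READ_BIT | ODP_WRITE_BIT)

static dma_addr_t *odp_alloc_dma_list(unsigned long num_pages)
{
	/* vzalloc: the array can be large, and zero means "not mapped". */
	return vzalloc(num_pages * sizeof(dma_addr_t));
}

static void odp_map_page(dma_addr_t *dma_list, unsigned long idx,
			 dma_addr_t dma_addr, dma_addr_t access_mask)
{
	if (!dma_list[idx])
		dma_list[idx] = dma_addr | access_mask;	/* first fault */
	else
		dma_list[idx] |= access_mask;	/* e.g. add write access */
}

static dma_addr_t odp_page_addr(dma_addr_t entry)
{
	return entry & ~ODP_FLAGS_MASK;	/* strip flags before unmapping */
}
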
/drivers/vfio/
vfio_iommu_type1.c
64 struct rb_root dma_list; member
99 struct rb_node *node = iommu->dma_list.rb_node; in vfio_find_dma()
117 struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL; in vfio_link_dma()
131 rb_insert_color(&new->node, &iommu->dma_list); in vfio_link_dma()
136 rb_erase(&old->node, &iommu->dma_list); in vfio_unlink_dma()
660 n = rb_first(&iommu->dma_list); in vfio_iommu_replay()
855 while ((node = rb_first(&iommu->dma_list))) in vfio_iommu_unmap_unpin_all()
918 iommu->dma_list = RB_ROOT; in vfio_iommu_type1_open()
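
In vfio_iommu_type1.c, dma_list is not an array at all but an rb_root: mappings are kept in a red-black tree keyed by IOVA range, so overlap lookups stay O(log n) and teardown can peel nodes off with rb_first(). A condensed sketch of the find and link halves shown in the hits, with the struct trimmed to what the tree needs:

#include <linux/rbtree.h>
#include <linux/types.h>

struct vfio_dma {
	struct rb_node node;
	dma_addr_t iova;	/* start of the mapped range */
	size_t size;
};

struct vfio_iommu {
	struct rb_root dma_list;	/* initialised to RB_ROOT */
};

/* Return any mapping that overlaps [start, start + size). */
static struct vfio_dma *find_dma(struct vfio_iommu *iommu,
				 dma_addr_t start, size_t size)
{
	struct rb_node *node = iommu->dma_list.rb_node;

	while (node) {
		struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node);

		if (start + size <= dma->iova)
			node = node->rb_left;
		else if (start >= dma->iova + dma->size)
			node = node->rb_right;
		else
			return dma;	/* ranges overlap */
	}
	return NULL;
}

/* Standard kernel rbtree insertion: walk to a leaf, link, rebalance. */
static void link_dma(struct vfio_iommu *iommu, struct vfio_dma *new)
{
	struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL;

	while (*link) {
		struct vfio_dma *dma = rb_entry(*link, struct vfio_dma, node);

		parent = *link;
		if (new->iova + new->size <= dma->iova)
			link = &(*link)->rb_left;
		else
			link = &(*link)->rb_right;
	}

	rb_link_node(&new->node, parent, link);
	rb_insert_color(&new->node, &iommu->dma_list);
}
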
/drivers/block/rsxx/
dma.c
686 struct list_head dma_list[RSXX_MAX_TARGETS]; in rsxx_dma_queue_bio() local
704 INIT_LIST_HEAD(&dma_list[i]); in rsxx_dma_queue_bio()
715 st = rsxx_queue_discard(card, &dma_list[tgt], laddr, in rsxx_dma_queue_bio()
737 st = rsxx_queue_dma(card, &dma_list[tgt], in rsxx_dma_queue_bio()
755 if (!list_empty(&dma_list[i])) { in rsxx_dma_queue_bio()
758 list_splice_tail(&dma_list[i], &card->ctrl[i].queue); in rsxx_dma_queue_bio()
770 rsxx_cleanup_dma_queue(&card->ctrl[i], &dma_list[i], in rsxx_dma_queue_bio()
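
The rsxx hits use a third shape: an on-stack array of list_heads, one per hardware target, that batches requests locally and then splices each non-empty batch onto its controller queue in O(1). A sketch with a hypothetical struct dma_req and MAX_TARGETS standing in for the driver's own types:

#include <linux/list.h>

#define MAX_TARGETS 8	/* stands in for RSXX_MAX_TARGETS */

struct dma_req {
	struct list_head list;
	/* ... per-transfer fields ... */
};

static void queue_batch(struct list_head *ctrl_queues, struct dma_req **reqs,
			const int *tgt, int n)
{
	struct list_head dma_list[MAX_TARGETS];
	int i;

	/* Stack-local list heads must be initialised before use. */
	for (i = 0; i < MAX_TARGETS; i++)
		INIT_LIST_HEAD(&dma_list[i]);

	/* Sort requests onto per-target local lists first... */
	for (i = 0; i < n; i++)
		list_add_tail(&reqs[i]->list, &dma_list[tgt[i]]);

	/* ...then hand each non-empty batch over in one splice. */
	for (i = 0; i < MAX_TARGETS; i++)
		if (!list_empty(&dma_list[i]))
			list_splice_tail(&dma_list[i], &ctrl_queues[i]);
}
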
/drivers/vme/
vme.c
774 struct vme_dma_list *dma_list; in vme_new_dma_list() local
783 dma_list = kmalloc(sizeof(struct vme_dma_list), GFP_KERNEL); in vme_new_dma_list()
784 if (dma_list == NULL) { in vme_new_dma_list()
788 INIT_LIST_HEAD(&dma_list->entries); in vme_new_dma_list()
789 dma_list->parent = ctrlr; in vme_new_dma_list()
790 mutex_init(&dma_list->mtx); in vme_new_dma_list()
792 return dma_list; in vme_new_dma_list()
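
vme.c shows the plain constructor form of the pattern: allocate the object, then initialise every embedded primitive (list head, mutex) before it escapes to a caller. A self-contained sketch; the struct is trimmed, and parent is typed void * here rather than the driver's resource type:

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct vme_dma_list {
	struct list_head entries;	/* chain of DMA descriptors */
	void *parent;			/* owning DMA controller resource */
	struct mutex mtx;		/* serialises list execution */
};

static struct vme_dma_list *new_dma_list(void *ctrlr)
{
	struct vme_dma_list *dma_list;

	dma_list = kmalloc(sizeof(*dma_list), GFP_KERNEL);
	if (!dma_list)
		return NULL;

	/* Initialise all embedded primitives before returning the object. */
	INIT_LIST_HEAD(&dma_list->entries);
	dma_list->parent = ctrlr;
	mutex_init(&dma_list->mtx);

	return dma_list;
}
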
/drivers/net/ethernet/mellanox/mlx4/
eq.c
965 u64 *dma_list = NULL; in mlx4_create_eq() local
986 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL); in mlx4_create_eq()
987 if (!dma_list) in mlx4_create_eq()
1003 dma_list[i] = t; in mlx4_create_eq()
1023 err = mlx4_write_mtt(dev, &eq->mtt, 0, npages, dma_list); in mlx4_create_eq()
1043 kfree(dma_list); in mlx4_create_eq()
1073 kfree(dma_list); in mlx4_create_eq()
/drivers/infiniband/hw/mlx5/
mem.c
171 dma_addr_t pa = umem->odp_data->dma_list[offset + i]; in __mlx5_ib_populate_pas()
odp.c
83 if (umem->odp_data->dma_list[idx] & in mlx5_ib_invalidate_range()
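
The mlx5 hits are the consumer side of the ODP encoding above: mem.c strips the flag bits to recover a bus address when populating hardware page tables, and odp.c tests them to see whether a page is mapped at all during invalidation. A short sketch reusing the same hypothetical bit names as the umem_odp sketch:

#include <linux/types.h>

/* Same hypothetical flag bits as in the umem_odp sketch above. */
#define ODP_READ_BIT	((dma_addr_t)1 << 0)
#define ODP_WRITE_BIT	((dma_addr_t)1 << 1)

static bool odp_page_present(dma_addr_t entry)
{
	/* Any access bit set means a page is mapped at this index. */
	return entry & (ODP_READ_BIT | ODP_WRITE_BIT);
}

static u64 odp_page_pa(dma_addr_t entry)
{
	/* Strip the flag bits to recover the bus address for the HCA. */
	return (u64)(entry & ~(ODP_READ_BIT | ODP_WRITE_BIT));
}
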