| /drivers/dma-buf/ |
| D | dma-buf.c |
    894   static struct sg_table * __map_dma_buf(struct dma_buf_attachment *attach,  in __map_dma_buf() argument
    900   sg_table = attach->dmabuf->ops->map_dma_buf(attach, direction);  in __map_dma_buf()
    904   if (!dma_buf_attachment_is_dynamic(attach)) {  in __map_dma_buf()
    905   ret = dma_resv_wait_timeout(attach->dmabuf->resv,  in __map_dma_buf()
    909   attach->dmabuf->ops->unmap_dma_buf(attach, sg_table,  in __map_dma_buf()
    1010  struct dma_buf_attachment *attach;  in dma_buf_dynamic_attach() local
    1019  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in dma_buf_dynamic_attach()
    1020  if (!attach)  in dma_buf_dynamic_attach()
    1023  attach->dev = dev;  in dma_buf_dynamic_attach()
    1024  attach->dmabuf = dmabuf;  in dma_buf_dynamic_attach()
    [all …]
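The dma-buf core matches show both halves of the attachment API: dma_buf_dynamic_attach() allocates and fills the attachment, while __map_dma_buf() performs the mapping and, for importers without a move_notify callback, waits on kernel fences before returning. Below is a minimal dynamic-importer sketch against that API; the "my_" names are hypothetical and error handling is trimmed to the essentials.

#include <linux/dma-buf.h>
#include <linux/dma-resv.h>

static void my_move_notify(struct dma_buf_attachment *attach)
{
	/* The exporter is about to move the buffer: drop cached mappings. */
}

static const struct dma_buf_attach_ops my_attach_ops = {
	.allow_peer2peer = true,
	.move_notify = my_move_notify,	/* mandatory for dynamic attachments */
};

static struct sg_table *my_import(struct dma_buf *dmabuf, struct device *dev)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;

	attach = dma_buf_dynamic_attach(dmabuf, dev, &my_attach_ops, NULL);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	/* Dynamic importers map with the reservation lock held. */
	dma_resv_lock(dmabuf->resv, NULL);
	sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
	dma_resv_unlock(dmabuf->resv);

	if (IS_ERR(sgt))
		dma_buf_detach(dmabuf, attach);

	return sgt;
}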
|
| /drivers/gpu/drm/xe/ |
| D | xe_dma_buf.c |
    26  struct dma_buf_attachment *attach)  in xe_dma_buf_attach() argument
    28  struct drm_gem_object *obj = attach->dmabuf->priv;  in xe_dma_buf_attach()
    30  if (attach->peer2peer &&  in xe_dma_buf_attach()
    31  pci_p2pdma_distance(to_pci_dev(obj->dev->dev), attach->dev, false) < 0)  in xe_dma_buf_attach()
    32  attach->peer2peer = false;  in xe_dma_buf_attach()
    34  if (!attach->peer2peer && !xe_bo_can_migrate(gem_to_xe_bo(obj), XE_PL_TT))  in xe_dma_buf_attach()
    42  struct dma_buf_attachment *attach)  in xe_dma_buf_detach() argument
    44  struct drm_gem_object *obj = attach->dmabuf->priv;  in xe_dma_buf_detach()
    49  static int xe_dma_buf_pin(struct dma_buf_attachment *attach)  in xe_dma_buf_pin() argument
    51  struct drm_gem_object *obj = attach->dmabuf->priv;  in xe_dma_buf_pin()
    [all …]
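Both the xe matches here and the amdgpu matches below gate peer-to-peer transfers on pci_p2pdma_distance(): a negative return means the PCIe fabric offers no usable P2P path, so the attach callback clears attach->peer2peer and the export falls back to system memory. A condensed sketch of that check, with "my_pdev" standing in for however the exporter reaches its own PCI device:

#include <linux/dma-buf.h>
#include <linux/pci-p2pdma.h>

static int my_dma_buf_attach(struct dma_buf *dmabuf,
			     struct dma_buf_attachment *attach)
{
	/* Hypothetical: the exporter stored its pci_dev in dmabuf->priv. */
	struct pci_dev *my_pdev = dmabuf->priv;

	if (attach->peer2peer &&
	    pci_p2pdma_distance(my_pdev, attach->dev, false) < 0)
		attach->peer2peer = false;	/* no P2P path; use system RAM */

	return 0;
}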
|
| /drivers/gpu/drm/amd/amdgpu/ |
| D | amdgpu_dma_buf.c |
    56   static struct amdgpu_device *dma_buf_attach_adev(struct dma_buf_attachment *attach)  in dma_buf_attach_adev() argument
    58   if (attach->importer_ops == &amdgpu_dma_buf_attach_ops) {  in dma_buf_attach_adev()
    59   struct drm_gem_object *obj = attach->importer_priv;  in dma_buf_attach_adev()
    77   struct dma_buf_attachment *attach)  in amdgpu_dma_buf_attach() argument
    79   struct amdgpu_device *attach_adev = dma_buf_attach_adev(attach);  in amdgpu_dma_buf_attach()
    85   pci_p2pdma_distance(adev->pdev, attach->dev, false) < 0)  in amdgpu_dma_buf_attach()
    86   attach->peer2peer = false;  in amdgpu_dma_buf_attach()
    98   static int amdgpu_dma_buf_pin(struct dma_buf_attachment *attach)  in amdgpu_dma_buf_pin() argument
    100  struct drm_gem_object *obj = attach->dmabuf->priv;  in amdgpu_dma_buf_pin()
    114  static void amdgpu_dma_buf_unpin(struct dma_buf_attachment *attach)  in amdgpu_dma_buf_unpin() argument
    [all …]
|
| /drivers/infiniband/core/ |
| D | umem_dmabuf.c |
    24   dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_map_pages()
    32   sgt = dma_buf_map_attachment(umem_dmabuf->attach,  in ib_umem_dmabuf_map_pages()
    75   ret = dma_resv_wait_timeout(umem_dmabuf->attach->dmabuf->resv,  in ib_umem_dmabuf_map_pages()
    88   dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_unmap_pages()
    109  dma_buf_unmap_attachment(umem_dmabuf->attach, umem_dmabuf->sgt,  in ib_umem_dmabuf_unmap_pages()
    158  umem_dmabuf->attach = dma_buf_dynamic_attach(  in ib_umem_dmabuf_get_with_dma_device()
    163  if (IS_ERR(umem_dmabuf->attach)) {  in ib_umem_dmabuf_get_with_dma_device()
    164  ret = ERR_CAST(umem_dmabuf->attach);  in ib_umem_dmabuf_get_with_dma_device()
    188  ib_umem_dmabuf_unsupported_move_notify(struct dma_buf_attachment *attach)  in ib_umem_dmabuf_unsupported_move_notify() argument
    190  struct ib_umem_dmabuf *umem_dmabuf = attach->importer_priv;  in ib_umem_dmabuf_unsupported_move_notify()
    [all …]
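The infiniband importer maps and unmaps with the reservation lock asserted, and devices that cannot tolerate buffer moves attach with a move_notify that must never fire, pinning the buffer instead. A sketch of that pinned fallback; names prefixed "my_" are hypothetical.

#include <linux/bug.h>
#include <linux/dma-buf.h>
#include <linux/dma-resv.h>

static void my_unsupported_move_notify(struct dma_buf_attachment *attach)
{
	/* Pinned buffers must never move, so reaching this is a bug. */
	WARN_ON_ONCE(true);
}

static const struct dma_buf_attach_ops my_pinned_ops = {
	.allow_peer2peer = true,
	.move_notify = my_unsupported_move_notify,
};

static struct dma_buf_attachment *my_attach_pinned(struct dma_buf *dmabuf,
						   struct device *dev)
{
	struct dma_buf_attachment *attach;
	int ret;

	attach = dma_buf_dynamic_attach(dmabuf, dev, &my_pinned_ops, NULL);
	if (IS_ERR(attach))
		return attach;

	dma_resv_lock(dmabuf->resv, NULL);
	ret = dma_buf_pin(attach);	/* exporter keeps the buffer in place */
	dma_resv_unlock(dmabuf->resv);

	if (ret) {
		dma_buf_detach(dmabuf, attach);
		return ERR_PTR(ret);
	}

	return attach;
}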
|
| /drivers/media/common/videobuf2/ |
| D | videobuf2-vmalloc.c |
    215  struct vb2_vmalloc_attachment *attach;  in vb2_vmalloc_dmabuf_ops_attach() local
    224  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in vb2_vmalloc_dmabuf_ops_attach()
    225  if (!attach)  in vb2_vmalloc_dmabuf_ops_attach()
    228  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_attach()
    231  kfree(attach);  in vb2_vmalloc_dmabuf_ops_attach()
    239  kfree(attach);  in vb2_vmalloc_dmabuf_ops_attach()
    246  attach->dma_dir = DMA_NONE;  in vb2_vmalloc_dmabuf_ops_attach()
    247  dbuf_attach->priv = attach;  in vb2_vmalloc_dmabuf_ops_attach()
    254  struct vb2_vmalloc_attachment *attach = db_attach->priv;  in vb2_vmalloc_dmabuf_ops_detach() local
    257  if (!attach)  in vb2_vmalloc_dmabuf_ops_detach()
    [all …]
|
| D | videobuf2-dma-sg.c |
    371  struct vb2_dma_sg_attachment *attach;  in vb2_dma_sg_dmabuf_ops_attach() local
    378  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in vb2_dma_sg_dmabuf_ops_attach()
    379  if (!attach)  in vb2_dma_sg_dmabuf_ops_attach()
    382  sgt = &attach->sgt;  in vb2_dma_sg_dmabuf_ops_attach()
    388  kfree(attach);  in vb2_dma_sg_dmabuf_ops_attach()
    400  attach->dma_dir = DMA_NONE;  in vb2_dma_sg_dmabuf_ops_attach()
    401  dbuf_attach->priv = attach;  in vb2_dma_sg_dmabuf_ops_attach()
    409  struct vb2_dma_sg_attachment *attach = db_attach->priv;  in vb2_dma_sg_dmabuf_ops_detach() local
    412  if (!attach)  in vb2_dma_sg_dmabuf_ops_detach()
    415  sgt = &attach->sgt;  in vb2_dma_sg_dmabuf_ops_detach()
    [all …]
|
| D | videobuf2-dma-contig.c |
    320  struct vb2_dc_attachment *attach;  in vb2_dc_dmabuf_ops_attach() local
    327  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in vb2_dc_dmabuf_ops_attach()
    328  if (!attach)  in vb2_dc_dmabuf_ops_attach()
    331  sgt = &attach->sgt;  in vb2_dc_dmabuf_ops_attach()
    337  kfree(attach);  in vb2_dc_dmabuf_ops_attach()
    349  attach->dma_dir = DMA_NONE;  in vb2_dc_dmabuf_ops_attach()
    350  dbuf_attach->priv = attach;  in vb2_dc_dmabuf_ops_attach()
    358  struct vb2_dc_attachment *attach = db_attach->priv;  in vb2_dc_dmabuf_ops_detach() local
    361  if (!attach)  in vb2_dc_dmabuf_ops_detach()
    364  sgt = &attach->sgt;  in vb2_dc_dmabuf_ops_detach()
    [all …]
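All three videobuf2 backends above share one attach pattern: allocate a per-attachment wrapper, give it its own scatterlist copy of the buffer pages, and start with dma_dir = DMA_NONE so the first map_dma_buf() call can tell that nothing is mapped yet. A condensed sketch of that pattern; the "my_" types and the page-array layout are hypothetical stand-ins for the per-backend details.

#include <linux/dma-buf.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct my_vb2_buf {
	struct page **pages;
	unsigned int num_pages;
};

struct my_vb2_attachment {
	struct sg_table sgt;
	enum dma_data_direction dma_dir;
};

static int my_vb2_dmabuf_ops_attach(struct dma_buf *dbuf,
				    struct dma_buf_attachment *dbuf_attach)
{
	struct my_vb2_buf *buf = dbuf->priv;
	struct my_vb2_attachment *attach;
	struct scatterlist *sg;
	unsigned int i;

	attach = kzalloc(sizeof(*attach), GFP_KERNEL);
	if (!attach)
		return -ENOMEM;

	if (sg_alloc_table(&attach->sgt, buf->num_pages, GFP_KERNEL)) {
		kfree(attach);
		return -ENOMEM;
	}

	/* Each attachment gets a private copy of the page list. */
	for_each_sgtable_sg(&attach->sgt, sg, i)
		sg_set_page(sg, buf->pages[i], PAGE_SIZE, 0);

	attach->dma_dir = DMA_NONE;	/* nothing mapped yet */
	dbuf_attach->priv = attach;

	return 0;
}

Keeping a separate sg_table per attachment lets each importing device hold its own mapping and direction without disturbing other importers of the same buffer.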
|
| /drivers/xen/ |
| D | gntdev-dmabuf.c |
    48   struct dma_buf_attachment *attach;  member
    221  struct dma_buf_attachment *attach)  in dmabuf_exp_ops_attach() argument
    231  attach->priv = gntdev_dmabuf_attach;  in dmabuf_exp_ops_attach()
    236  struct dma_buf_attachment *attach)  in dmabuf_exp_ops_detach() argument
    238  struct gntdev_dmabuf_attachment *gntdev_dmabuf_attach = attach->priv;  in dmabuf_exp_ops_detach()
    245  dma_unmap_sgtable(attach->dev, sgt,  in dmabuf_exp_ops_detach()
    253  attach->priv = NULL;  in dmabuf_exp_ops_detach()
    258  dmabuf_exp_ops_map_dma_buf(struct dma_buf_attachment *attach,  in dmabuf_exp_ops_map_dma_buf() argument
    261  struct gntdev_dmabuf_attachment *gntdev_dmabuf_attach = attach->priv;  in dmabuf_exp_ops_map_dma_buf()
    262  struct gntdev_dmabuf *gntdev_dmabuf = attach->dmabuf->priv;  in dmabuf_exp_ops_map_dma_buf()
    [all …]
|
| /drivers/gpu/drm/virtio/ |
| D | virtgpu_prime.c |
    47   virtgpu_gem_map_dma_buf(struct dma_buf_attachment *attach,  in virtgpu_gem_map_dma_buf() argument
    50   struct drm_gem_object *obj = attach->dmabuf->priv;  in virtgpu_gem_map_dma_buf()
    54   return virtio_gpu_vram_map_dma_buf(bo, attach->dev, dir);  in virtgpu_gem_map_dma_buf()
    56   return drm_gem_map_dma_buf(attach, dir);  in virtgpu_gem_map_dma_buf()
    59   static void virtgpu_gem_unmap_dma_buf(struct dma_buf_attachment *attach,  in virtgpu_gem_unmap_dma_buf() argument
    63   struct drm_gem_object *obj = attach->dmabuf->priv;  in virtgpu_gem_unmap_dma_buf()
    67   virtio_gpu_vram_unmap_dma_buf(attach->dev, sgt, dir);  in virtgpu_gem_unmap_dma_buf()
    71   drm_gem_unmap_dma_buf(attach, sgt, dir);  in virtgpu_gem_unmap_dma_buf()
    77   .attach = virtio_dma_buf_attach,
    166  struct drm_device *dev, struct dma_buf_attachment *attach,  in virtgpu_gem_prime_import_sg_table() argument
|
| /drivers/gpu/drm/omapdrm/ |
| D | omap_gem_dmabuf.c |
    100  struct dma_buf_attachment *attach;  in omap_gem_prime_import() local
    117  attach = dma_buf_attach(dma_buf, dev->dev);  in omap_gem_prime_import()
    118  if (IS_ERR(attach))  in omap_gem_prime_import()
    119  return ERR_CAST(attach);  in omap_gem_prime_import()
    123  sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);  in omap_gem_prime_import()
    135  obj->import_attach = attach;  in omap_gem_prime_import()
    140  dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_TO_DEVICE);  in omap_gem_prime_import()
    142  dma_buf_detach(dma_buf, attach);  in omap_gem_prime_import()
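omap_gem_prime_import() shows the classic non-dynamic import sequence: take a reference, attach, map, and unwind in exactly reverse order on failure (the tegra and i915 matches below repeat the same shape). A skeleton of that flow, with the driver-object wrapping left as a comment:

#include <linux/dma-buf.h>

static int my_prime_import(struct device *dev, struct dma_buf *dma_buf)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;
	int ret;

	get_dma_buf(dma_buf);	/* hold a reference across the import */

	attach = dma_buf_attach(dma_buf, dev);
	if (IS_ERR(attach)) {
		ret = PTR_ERR(attach);
		goto fail_put;
	}

	sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
	if (IS_ERR(sgt)) {
		ret = PTR_ERR(sgt);
		goto fail_detach;
	}

	/* ... wrap sgt in a driver object and keep attach around ... */
	return 0;

fail_detach:
	dma_buf_detach(dma_buf, attach);
fail_put:
	dma_buf_put(dma_buf);
	return ret;
}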
|
| /drivers/gpu/drm/ |
| D | drm_prime.c |
    605  struct dma_buf_attachment *attach)  in drm_gem_map_attach() argument
    631  struct dma_buf_attachment *attach)  in drm_gem_map_detach() argument
    651  struct sg_table *drm_gem_map_dma_buf(struct dma_buf_attachment *attach,  in drm_gem_map_dma_buf() argument
    654  struct drm_gem_object *obj = attach->dmabuf->priv;  in drm_gem_map_dma_buf()
    668  ret = dma_map_sgtable(attach->dev, sgt, dir,  in drm_gem_map_dma_buf()
    688  void drm_gem_unmap_dma_buf(struct dma_buf_attachment *attach,  in drm_gem_unmap_dma_buf() argument
    695  dma_unmap_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);  in drm_gem_unmap_dma_buf()
    814  .attach = drm_gem_map_attach,
    938  struct dma_buf_attachment *attach;  in drm_gem_prime_import_dev() local
    958  attach = dma_buf_attach(dma_buf, attach_dev);  in drm_gem_prime_import_dev()
    [all …]
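drm_gem_map_dma_buf() is the generic exporter-side map callback: fetch the object's sg_table, then dma_map_sgtable() it for the importing device. DMA_ATTR_SKIP_CPU_SYNC is passed because dma-buf handles CPU coherency through explicit begin/end CPU-access calls rather than at map time. A reduced sketch; my_get_sg_table() is a hypothetical driver hook, declared but not defined here:

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>

struct sg_table *my_get_sg_table(void *obj);	/* hypothetical hook */

static struct sg_table *my_map_dma_buf(struct dma_buf_attachment *attach,
				       enum dma_data_direction dir)
{
	struct sg_table *sgt;
	int ret;

	sgt = my_get_sg_table(attach->dmabuf->priv);
	if (IS_ERR(sgt))
		return sgt;

	/* Map for the importer's device; CPU sync is dma-buf's job. */
	ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret)
		return ERR_PTR(ret);

	return sgt;
}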
|
| D | drm_gem_dma_helper.c |
    463  struct dma_buf_attachment *attach,  in drm_gem_dma_prime_import_sg_table() argument
    469  if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size)  in drm_gem_dma_prime_import_sg_table()
    473  dma_obj = __drm_gem_dma_create(dev, attach->dmabuf->size, true);  in drm_gem_dma_prime_import_sg_table()
    481  attach->dmabuf->size);  in drm_gem_dma_prime_import_sg_table()
    575  struct dma_buf_attachment *attach,  in drm_gem_dma_prime_import_sg_table_vmap() argument
    583  ret = dma_buf_vmap_unlocked(attach->dmabuf, &map);  in drm_gem_dma_prime_import_sg_table_vmap()
    589  obj = drm_gem_dma_prime_import_sg_table(dev, attach, sgt);  in drm_gem_dma_prime_import_sg_table_vmap()
    591  dma_buf_vunmap_unlocked(attach->dmabuf, &map);  in drm_gem_dma_prime_import_sg_table_vmap()
|
| /drivers/gpu/drm/tegra/ |
| D | gem.c |
    82   map->attach = dma_buf_attach(buf, dev);  in tegra_bo_pin()
    83   if (IS_ERR(map->attach)) {  in tegra_bo_pin()
    84   err = PTR_ERR(map->attach);  in tegra_bo_pin()
    88   map->sgt = dma_buf_map_attachment_unlocked(map->attach, direction);  in tegra_bo_pin()
    90   dma_buf_detach(buf, map->attach);  in tegra_bo_pin()
    163  if (map->attach) {  in tegra_bo_unpin()
    164  dma_buf_unmap_attachment_unlocked(map->attach, map->sgt,  in tegra_bo_unpin()
    166  dma_buf_detach(map->attach->dmabuf, map->attach);  in tegra_bo_unpin()
    460  struct dma_buf_attachment *attach;  in tegra_bo_import() local
    468  attach = dma_buf_attach(buf, drm->dev);  in tegra_bo_import()
    [all …]
|
| /drivers/gpu/drm/i915/gem/ |
| D | i915_gem_dmabuf.c |
    28   static struct sg_table *i915_gem_map_dma_buf(struct dma_buf_attachment *attach,  in i915_gem_map_dma_buf() argument
    31   struct drm_i915_gem_object *obj = dma_buf_to_obj(attach->dmabuf);  in i915_gem_map_dma_buf()
    56   ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);  in i915_gem_map_dma_buf()
    168  struct dma_buf_attachment *attach)  in i915_gem_dmabuf_attach() argument
    197  struct dma_buf_attachment *attach)  in i915_gem_dmabuf_detach() argument
    205  .attach = i915_gem_dmabuf_attach,
    286  struct dma_buf_attachment *attach;  in i915_gem_prime_import() local
    308  attach = dma_buf_attach(dma_buf, dev->dev);  in i915_gem_prime_import()
    309  if (IS_ERR(attach))  in i915_gem_prime_import()
    310  return ERR_CAST(attach);  in i915_gem_prime_import()
    [all …]
|
| /drivers/nvdimm/ |
| D | claim.c |
    44  bool __nd_attach_ndns(struct device *dev, struct nd_namespace_common *attach,  in __nd_attach_ndns() argument
    47  struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(&attach->dev);  in __nd_attach_ndns()
    49  if (attach->claim)  in __nd_attach_ndns()
    53  attach->claim = dev;  in __nd_attach_ndns()
    54  *_ndns = attach;  in __nd_attach_ndns()
    55  get_device(&attach->dev);  in __nd_attach_ndns()
    59  bool nd_attach_ndns(struct device *dev, struct nd_namespace_common *attach,  in nd_attach_ndns() argument
    64  nvdimm_bus_lock(&attach->dev);  in nd_attach_ndns()
    65  claimed = __nd_attach_ndns(dev, attach, _ndns);  in nd_attach_ndns()
    66  nvdimm_bus_unlock(&attach->dev);  in nd_attach_ndns()
|
| /drivers/iio/ |
| D | industrialio-buffer.c |
    53    struct dma_buf_attachment *attach;  member
    1562  struct dma_buf_attachment *attach = priv->attach;  in iio_buffer_dmabuf_release() local
    1564  struct dma_buf *dmabuf = attach->dmabuf;  in iio_buffer_dmabuf_release()
    1567  dma_buf_unmap_attachment(attach, priv->sgt, priv->dir);  in iio_buffer_dmabuf_release()
    1572  dma_buf_detach(attach->dmabuf, attach);  in iio_buffer_dmabuf_release()
    1577  static void iio_buffer_dmabuf_get(struct dma_buf_attachment *attach)  in iio_buffer_dmabuf_get() argument
    1579  struct iio_dmabuf_priv *priv = attach->importer_priv;  in iio_buffer_dmabuf_get()
    1584  static void iio_buffer_dmabuf_put(struct dma_buf_attachment *attach)  in iio_buffer_dmabuf_put() argument
    1586  struct iio_dmabuf_priv *priv = attach->importer_priv;  in iio_buffer_dmabuf_put()
    1605  iio_buffer_dmabuf_put(priv->attach);  in iio_buffer_chrdev_release()
    [all …]
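The iio matches show a kref-managed attachment lifetime: a refcounted private struct lives in attach->importer_priv, and the kref release unmaps, detaches, and drops the dma-buf reference in one place. A sketch of that shape, using the unlocked unmap helper so the release path need not take the reservation lock itself; the "my_" names are hypothetical.

#include <linux/dma-buf.h>
#include <linux/kref.h>
#include <linux/slab.h>

struct my_dmabuf_priv {
	struct kref ref;
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;
	enum dma_data_direction dir;
};

static void my_dmabuf_release(struct kref *ref)
{
	struct my_dmabuf_priv *priv =
		container_of(ref, struct my_dmabuf_priv, ref);
	struct dma_buf *dmabuf = priv->attach->dmabuf;

	/* Tear everything down in reverse order of setup. */
	dma_buf_unmap_attachment_unlocked(priv->attach, priv->sgt, priv->dir);
	dma_buf_detach(dmabuf, priv->attach);
	dma_buf_put(dmabuf);
	kfree(priv);
}

static void my_dmabuf_get(struct dma_buf_attachment *attach)
{
	struct my_dmabuf_priv *priv = attach->importer_priv;

	kref_get(&priv->ref);
}

static void my_dmabuf_put(struct dma_buf_attachment *attach)
{
	struct my_dmabuf_priv *priv = attach->importer_priv;

	kref_put(&priv->ref, my_dmabuf_release);
}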
|
| /drivers/gpu/drm/vmwgfx/ |
| D | vmwgfx_prime.c |
    45  struct dma_buf_attachment *attach)  in vmw_prime_map_attach() argument
    51  struct dma_buf_attachment *attach)  in vmw_prime_map_detach() argument
    55  static struct sg_table *vmw_prime_map_dma_buf(struct dma_buf_attachment *attach,  in vmw_prime_map_dma_buf() argument
    61  static void vmw_prime_unmap_dma_buf(struct dma_buf_attachment *attach,  in vmw_prime_unmap_dma_buf() argument
    68  .attach = vmw_prime_map_attach,
|
| /drivers/gpu/drm/armada/ |
| D | armada_gem.c |
    386  armada_gem_prime_map_dma_buf(struct dma_buf_attachment *attach,  in armada_gem_prime_map_dma_buf() argument
    389  struct drm_gem_object *obj = attach->dmabuf->priv;  in armada_gem_prime_map_dma_buf()
    419  if (dma_map_sgtable(attach->dev, sgt, dir, 0))  in armada_gem_prime_map_dma_buf()
    428  if (dma_map_sgtable(attach->dev, sgt, dir, 0))  in armada_gem_prime_map_dma_buf()
    452  static void armada_gem_prime_unmap_dma_buf(struct dma_buf_attachment *attach,  in armada_gem_prime_unmap_dma_buf() argument
    455  struct drm_gem_object *obj = attach->dmabuf->priv;  in armada_gem_prime_unmap_dma_buf()
    460  dma_unmap_sgtable(attach->dev, sgt, dir, 0);  in armada_gem_prime_unmap_dma_buf()
    502  struct dma_buf_attachment *attach;  in armada_gem_prime_import() local
    517  attach = dma_buf_attach(buf, dev->dev);  in armada_gem_prime_import()
    518  if (IS_ERR(attach))  in armada_gem_prime_import()
    [all …]
|
| /drivers/vfio/ |
| D | device_cdev.c |
    166  struct vfio_device_attach_iommufd_pt attach;  in vfio_df_ioctl_attach_pt() local
    172  if (copy_from_user(&attach, arg, minsz))  in vfio_df_ioctl_attach_pt()
    175  if (attach.argsz < minsz || attach.flags)  in vfio_df_ioctl_attach_pt()
    179  ret = device->ops->attach_ioas(device, &attach.pt_id);  in vfio_df_ioctl_attach_pt()
    183  if (copy_to_user(&arg->pt_id, &attach.pt_id, sizeof(attach.pt_id))) {  in vfio_df_ioctl_attach_pt()
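vfio_df_ioctl_attach_pt() follows the extensible-ioctl convention: copy only the fixed minimum size (minsz) from userspace, reject requests whose argsz is too small or whose flags are unknown, then copy results back field by field so old userspace and new kernels stay compatible. A generic sketch with hypothetical struct and handler names:

#include <linux/stddef.h>
#include <linux/types.h>
#include <linux/uaccess.h>

struct my_attach_arg {
	__u32 argsz;
	__u32 flags;
	__u32 pt_id;	/* in/out */
};

static int my_ioctl_attach(struct my_attach_arg __user *arg)
{
	unsigned long minsz = offsetofend(struct my_attach_arg, pt_id);
	struct my_attach_arg attach;

	if (copy_from_user(&attach, arg, minsz))
		return -EFAULT;

	if (attach.argsz < minsz || attach.flags)
		return -EINVAL;

	/* ... perform the attach, which may rewrite attach.pt_id ... */

	/* Copy back only the output field, not the whole struct. */
	if (copy_to_user(&arg->pt_id, &attach.pt_id, sizeof(attach.pt_id)))
		return -EFAULT;

	return 0;
}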
|
| /drivers/virtio/ |
| D | virtio_dma_buf.c |
    33  exp_info->ops->attach != &virtio_dma_buf_attach)  in virtio_dma_buf_export()
    44  struct dma_buf_attachment *attach)  in virtio_dma_buf_attach() argument
    52  ret = ops->device_attach(dma_buf, attach);  in virtio_dma_buf_attach()
    69  return dma_buf->ops->attach == &virtio_dma_buf_attach;  in is_virtio_dma_buf()
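virtio identifies its own exports by pointer identity: is_virtio_dma_buf() simply compares ops->attach against virtio_dma_buf_attach, which every virtio exporter must install (virtio_dma_buf_export() rejects anything else). The same trick in generic form, assuming "my_attach" is the callback all of our exporters share:

#include <linux/dma-buf.h>

int my_attach(struct dma_buf *dma_buf, struct dma_buf_attachment *attach);

static bool is_my_dma_buf(struct dma_buf *dma_buf)
{
	return dma_buf->ops->attach == &my_attach;
}

Checking the ops pointer avoids adding a type tag to struct dma_buf while still letting an importer reject buffers that were not exported by this driver.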
|
| /drivers/gpu/drm/radeon/ |
| D | radeon_prime.c |
    46  struct dma_buf_attachment *attach,  in radeon_gem_prime_import_sg_table() argument
    49  struct dma_resv *resv = attach->dmabuf->resv;  in radeon_gem_prime_import_sg_table()
    55  ret = radeon_bo_create(rdev, attach->dmabuf->size, PAGE_SIZE, false,  in radeon_gem_prime_import_sg_table()
|
| /drivers/power/supply/ |
| D | mt6370-charger.c |
    100  int attach;  member
    260  unsigned int attach, usb_stat;  in mt6370_chg_bc12_work_func() local
    263  attach = priv->attach;  in mt6370_chg_bc12_work_func()
    265  switch (attach) {  in mt6370_chg_bc12_work_func()
    270  ret = mt6370_chg_field_set(priv, F_USBCHGEN, attach);  in mt6370_chg_bc12_work_func()
    429  val->intval = !!priv->attach;  in mt6370_chg_get_online()
    515  if (pwr_rdy == !!priv->attach) {  in mt6370_chg_set_online()
    521  priv->attach = pwr_rdy;  in mt6370_chg_set_online()
    796  priv->attach = MT6370_ATTACH_STAT_ATTACH_BC12_DONE;  in mt6370_attach_i_handler()
    904  priv->attach = MT6370_ATTACH_STAT_DETACH;  in mt6370_chg_probe()
|
| /drivers/net/ethernet/marvell/octeontx2/af/ |
| D | rvu.c |
    343   u16 lf, bool attach)  in rvu_update_rsrc_map() argument
    365   block->fn_map[lf] = attach ? pcifunc : 0;  in rvu_update_rsrc_map()
    369   pfvf->npalf = attach ? true : false;  in rvu_update_rsrc_map()
    374   pfvf->nixlf = attach ? true : false;  in rvu_update_rsrc_map()
    378   attach ? pfvf->sso++ : pfvf->sso--;  in rvu_update_rsrc_map()
    382   attach ? pfvf->ssow++ : pfvf->ssow--;  in rvu_update_rsrc_map()
    386   attach ? pfvf->timlfs++ : pfvf->timlfs--;  in rvu_update_rsrc_map()
    390   attach ? pfvf->cptlfs++ : pfvf->cptlfs--;  in rvu_update_rsrc_map()
    394   attach ? pfvf->cpt1_lfs++ : pfvf->cpt1_lfs--;  in rvu_update_rsrc_map()
    1523  u16 pcifunc, struct rsrc_attach *attach)  in rvu_get_attach_blkaddr() argument
    [all …]
|
| /drivers/gpu/drm/nouveau/ |
| D | nouveau_prime.c |
    40  struct dma_buf_attachment *attach,  in nouveau_gem_prime_import_sg_table() argument
    46  struct dma_resv *robj = attach->dmabuf->resv;  in nouveau_gem_prime_import_sg_table()
    47  u64 size = attach->dmabuf->size;  in nouveau_gem_prime_import_sg_table()
|
| /drivers/net/ethernet/qlogic/qlcnic/ |
| D | qlcnic_dcb.h |
    30  int (*attach) (struct qlcnic_dcb *);  member
    60  if (dcb && dcb->ops->attach)  in qlcnic_dcb_attach()
    61  return dcb->ops->attach(dcb);  in qlcnic_dcb_attach()
|