
Searched refs:dma_buf (Results 1 – 25 of 40) sorted by relevance

/drivers/gpu/drm/
drm_prime.c
62 struct dma_buf *dma_buf; member
65 …e_add_buf_handle(struct drm_prime_file_private *prime_fpriv, struct dma_buf *dma_buf, uint32_t han…
92 static void drm_gem_dmabuf_release(struct dma_buf *dma_buf) in drm_gem_dmabuf_release() argument
94 struct drm_gem_object *obj = dma_buf->priv; in drm_gem_dmabuf_release()
96 if (obj->export_dma_buf == dma_buf) { in drm_gem_dmabuf_release()
103 static void *drm_gem_dmabuf_vmap(struct dma_buf *dma_buf) in drm_gem_dmabuf_vmap() argument
105 struct drm_gem_object *obj = dma_buf->priv; in drm_gem_dmabuf_vmap()
111 static void drm_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr) in drm_gem_dmabuf_vunmap() argument
113 struct drm_gem_object *obj = dma_buf->priv; in drm_gem_dmabuf_vunmap()
119 static void *drm_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf, in drm_gem_dmabuf_kmap_atomic() argument
[all …]
/drivers/gpu/drm/i915/
i915_gem_dmabuf.c
93 static void i915_gem_dmabuf_release(struct dma_buf *dma_buf) in i915_gem_dmabuf_release() argument
95 struct drm_i915_gem_object *obj = dma_buf->priv; in i915_gem_dmabuf_release()
97 if (obj->base.export_dma_buf == dma_buf) { in i915_gem_dmabuf_release()
104 static void *i915_gem_dmabuf_vmap(struct dma_buf *dma_buf) in i915_gem_dmabuf_vmap() argument
106 struct drm_i915_gem_object *obj = dma_buf->priv; in i915_gem_dmabuf_vmap()
152 static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr) in i915_gem_dmabuf_vunmap() argument
154 struct drm_i915_gem_object *obj = dma_buf->priv; in i915_gem_dmabuf_vunmap()
171 static void *i915_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf, unsigned long page_num) in i915_gem_dmabuf_kmap_atomic() argument
176 static void i915_gem_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, unsigned long page_num, void *ad… in i915_gem_dmabuf_kunmap_atomic() argument
180 static void *i915_gem_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num) in i915_gem_dmabuf_kmap() argument
[all …]
/drivers/staging/android/ion/
ion_test.c
40 struct dma_buf *dma_buf; member
44 static int ion_handle_test_dma(struct device *dev, struct dma_buf *dma_buf, in ion_handle_test_dma() argument
55 attach = dma_buf_attach(dma_buf, dev); in ion_handle_test_dma()
96 dma_buf_detach(dma_buf, attach); in ion_handle_test_dma()
100 static int ion_handle_test_kernel(struct dma_buf *dma_buf, void __user *ptr, in ion_handle_test_kernel() argument
109 if (offset > dma_buf->size || size > dma_buf->size - offset) in ion_handle_test_kernel()
112 ret = dma_buf_begin_cpu_access(dma_buf, offset, size, dir); in ion_handle_test_kernel()
118 void *vaddr = dma_buf_kmap(dma_buf, page_offset); in ion_handle_test_kernel()
130 dma_buf_kunmap(dma_buf, page_offset, vaddr); in ion_handle_test_kernel()
142 dma_buf_end_cpu_access(dma_buf, offset, size, dir); in ion_handle_test_kernel()
[all …]
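The ion_test.c matches above show the standard CPU-access bracket in this kernel generation: any dma_buf_kmap()/dma_buf_kunmap() of an imported buffer is wrapped in dma_buf_begin_cpu_access()/dma_buf_end_cpu_access(). A minimal sketch of that pattern, using the four-argument begin/end signatures seen in these results; the helper name and its caller are illustrative, not from the tree:

#include <linux/dma-buf.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Illustrative helper: copy one page out of a dma-buf through a CPU mapping. */
static int example_read_page(struct dma_buf *dmabuf, unsigned long page_num, void *dst)
{
        void *vaddr;
        int ret;

        /* Tell the exporter the CPU is about to read this range. */
        ret = dma_buf_begin_cpu_access(dmabuf, page_num * PAGE_SIZE,
                                       PAGE_SIZE, DMA_FROM_DEVICE);
        if (ret)
                return ret;

        vaddr = dma_buf_kmap(dmabuf, page_num); /* map a single page */
        if (!vaddr) {
                ret = -ENOMEM;
                goto out;
        }
        memcpy(dst, vaddr, PAGE_SIZE);
        dma_buf_kunmap(dmabuf, page_num, vaddr);
out:
        /* Close the CPU access window so the exporter can flush/invalidate. */
        dma_buf_end_cpu_access(dmabuf, page_num * PAGE_SIZE, PAGE_SIZE,
                               DMA_FROM_DEVICE);
        return ret;
}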
ion.c
936 struct dma_buf *dmabuf = attachment->dmabuf; in ion_map_dma_buf()
1061 static int ion_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma) in ion_mmap()
1096 static void ion_dma_buf_release(struct dma_buf *dmabuf) in ion_dma_buf_release()
1103 static void *ion_dma_buf_kmap(struct dma_buf *dmabuf, unsigned long offset) in ion_dma_buf_kmap()
1110 static void ion_dma_buf_kunmap(struct dma_buf *dmabuf, unsigned long offset, in ion_dma_buf_kunmap()
1116 static int ion_dma_buf_begin_cpu_access(struct dma_buf *dmabuf, size_t start, in ion_dma_buf_begin_cpu_access()
1137 static void ion_dma_buf_end_cpu_access(struct dma_buf *dmabuf, size_t start, in ion_dma_buf_end_cpu_access()
1161 struct dma_buf *ion_share_dma_buf(struct ion_client *client, in ion_share_dma_buf()
1165 struct dma_buf *dmabuf; in ion_share_dma_buf()
1191 struct dma_buf *dmabuf; in ion_share_dma_buf_fd()
[all …]
/drivers/gpu/drm/exynos/
exynos_drm_dmabuf.c
25 static int exynos_gem_attach_dma_buf(struct dma_buf *dmabuf, in exynos_gem_attach_dma_buf()
41 static void exynos_gem_detach_dma_buf(struct dma_buf *dmabuf, in exynos_gem_detach_dma_buf()
132 static void exynos_dmabuf_release(struct dma_buf *dmabuf) in exynos_dmabuf_release()
155 static void *exynos_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf, in exynos_gem_dmabuf_kmap_atomic() argument
163 static void exynos_gem_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, in exynos_gem_dmabuf_kunmap_atomic() argument
170 static void *exynos_gem_dmabuf_kmap(struct dma_buf *dma_buf, in exynos_gem_dmabuf_kmap() argument
178 static void exynos_gem_dmabuf_kunmap(struct dma_buf *dma_buf, in exynos_gem_dmabuf_kunmap() argument
184 static int exynos_gem_dmabuf_mmap(struct dma_buf *dma_buf, in exynos_gem_dmabuf_mmap() argument
203 struct dma_buf *exynos_dmabuf_prime_export(struct drm_device *drm_dev, in exynos_dmabuf_prime_export()
213 struct dma_buf *dma_buf) in exynos_dmabuf_prime_import() argument
[all …]
exynos_drm_dmabuf.h
16 struct dma_buf *exynos_dmabuf_prime_export(struct drm_device *drm_dev,
20 struct dma_buf *dma_buf);
/drivers/base/
dma-buf.c
44 struct dma_buf *dmabuf; in dma_buf_release()
65 struct dma_buf *dmabuf; in dma_buf_mmap_internal()
110 struct dma_buf *dma_buf_export_named(void *priv, const struct dma_buf_ops *ops, in dma_buf_export_named()
113 struct dma_buf *dmabuf; in dma_buf_export_named()
126 dmabuf = kzalloc(sizeof(struct dma_buf), GFP_KERNEL); in dma_buf_export_named()
158 int dma_buf_fd(struct dma_buf *dmabuf, int flags) in dma_buf_fd()
183 struct dma_buf *dma_buf_get(int fd) in dma_buf_get()
207 void dma_buf_put(struct dma_buf *dmabuf) in dma_buf_put()
226 struct dma_buf_attachment *dma_buf_attach(struct dma_buf *dmabuf, in dma_buf_attach()
268 void dma_buf_detach(struct dma_buf *dmabuf, struct dma_buf_attachment *attach) in dma_buf_detach()
[all …]
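The core implementation in drivers/base/dma-buf.c above also defines the handle and reference lifecycle: dma_buf_fd() installs a file descriptor for an exported buffer, dma_buf_get() resolves such an fd back to a struct dma_buf and takes a reference, and dma_buf_put() drops that reference. A short, hedged sketch of the round trip, assuming the buffer has already been exported; the function and its flow are illustrative only:

#include <linux/dma-buf.h>
#include <linux/err.h>
#include <linux/fcntl.h>

/* Illustrative: hand an exported dma-buf to userspace, then resolve it again. */
static int example_share_and_resolve(struct dma_buf *exported)
{
        struct dma_buf *resolved;
        int fd;

        /* Reserve an fd that userspace can pass to another driver's ioctl. */
        fd = dma_buf_fd(exported, O_CLOEXEC);
        if (fd < 0)
                return fd;

        /* The importing side turns the fd back into a referenced dma_buf. */
        resolved = dma_buf_get(fd);
        if (IS_ERR(resolved))
                return PTR_ERR(resolved);

        /* ... attach, map and use the buffer here ... */

        dma_buf_put(resolved);  /* drop the reference dma_buf_get() took */
        return 0;
}

In real drivers the two halves live in different modules: the exporter only calls dma_buf_fd(), and the importer only ever sees the file descriptor.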
/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c
71 static void omap_gem_dmabuf_release(struct dma_buf *buffer) in omap_gem_dmabuf_release()
81 static int omap_gem_dmabuf_begin_cpu_access(struct dma_buf *buffer, in omap_gem_dmabuf_begin_cpu_access()
96 static void omap_gem_dmabuf_end_cpu_access(struct dma_buf *buffer, in omap_gem_dmabuf_end_cpu_access()
104 static void *omap_gem_dmabuf_kmap_atomic(struct dma_buf *buffer, in omap_gem_dmabuf_kmap_atomic()
114 static void omap_gem_dmabuf_kunmap_atomic(struct dma_buf *buffer, in omap_gem_dmabuf_kunmap_atomic()
120 static void *omap_gem_dmabuf_kmap(struct dma_buf *buffer, in omap_gem_dmabuf_kmap()
130 static void omap_gem_dmabuf_kunmap(struct dma_buf *buffer, in omap_gem_dmabuf_kunmap()
143 static int omap_gem_dmabuf_mmap(struct dma_buf *buffer, in omap_gem_dmabuf_mmap()
194 struct dma_buf *omap_gem_prime_export(struct drm_device *dev, in omap_gem_prime_export()
201 struct dma_buf *buffer) in omap_gem_prime_import()
/drivers/video/adf/
adf_memblock.c
65 static void __init_memblock adf_memblock_release(struct dma_buf *buf) in adf_memblock_release()
75 static void *adf_memblock_do_kmap(struct dma_buf *buf, unsigned long pgoffset, in adf_memblock_do_kmap()
88 static void *adf_memblock_kmap_atomic(struct dma_buf *buf, in adf_memblock_kmap_atomic()
94 static void adf_memblock_kunmap_atomic(struct dma_buf *buf, in adf_memblock_kunmap_atomic()
100 static void *adf_memblock_kmap(struct dma_buf *buf, unsigned long pgoffset) in adf_memblock_kmap()
105 static void adf_memblock_kunmap(struct dma_buf *buf, unsigned long pgoffset, in adf_memblock_kunmap()
111 static int adf_memblock_mmap(struct dma_buf *buf, struct vm_area_struct *vma) in adf_memblock_mmap()
141 struct dma_buf *adf_memblock_export(phys_addr_t base, size_t size, int flags) in adf_memblock_export()
144 struct dma_buf *buf; in adf_memblock_export()
adf_fbdev.c
210 buf.dma_bufs[0] = fbdev->dma_buf; in adf_fbdev_post()
256 &fbdev->dma_buf, &fbdev->offset, &fbdev->pitch); in adf_fb_alloc()
262 fbdev->vaddr = dma_buf_vmap(fbdev->dma_buf); in adf_fb_alloc()
271 fbdev->info->fix.smem_len = fbdev->dma_buf->size; in adf_fb_alloc()
277 dma_buf_put(fbdev->dma_buf); in adf_fb_alloc()
283 dma_buf_vunmap(fbdev->dma_buf, fbdev->vaddr); in adf_fb_destroy()
284 dma_buf_put(fbdev->dma_buf); in adf_fb_destroy()
569 return dma_buf_mmap(fbdev->dma_buf, vma, 0); in adf_fbdev_mmap()
/drivers/scsi/lpfc/
lpfc_mem.c
454 struct hbq_dmabuf *dma_buf; in lpfc_sli4_rb_alloc() local
456 dma_buf = kzalloc(sizeof(struct hbq_dmabuf), GFP_KERNEL); in lpfc_sli4_rb_alloc()
457 if (!dma_buf) in lpfc_sli4_rb_alloc()
460 dma_buf->hbuf.virt = pci_pool_alloc(phba->lpfc_hrb_pool, GFP_KERNEL, in lpfc_sli4_rb_alloc()
461 &dma_buf->hbuf.phys); in lpfc_sli4_rb_alloc()
462 if (!dma_buf->hbuf.virt) { in lpfc_sli4_rb_alloc()
463 kfree(dma_buf); in lpfc_sli4_rb_alloc()
466 dma_buf->dbuf.virt = pci_pool_alloc(phba->lpfc_drb_pool, GFP_KERNEL, in lpfc_sli4_rb_alloc()
467 &dma_buf->dbuf.phys); in lpfc_sli4_rb_alloc()
468 if (!dma_buf->dbuf.virt) { in lpfc_sli4_rb_alloc()
[all …]
/drivers/scsi/csiostor/
csio_scsi.c
230 struct csio_dma_buf *dma_buf; in csio_scsi_init_cmd_wr() local
246 dma_buf = &req->dma_buf; in csio_scsi_init_cmd_wr()
249 wr->rsp_dmalen = cpu_to_be32(dma_buf->len); in csio_scsi_init_cmd_wr()
250 wr->rsp_dmaaddr = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_cmd_wr()
322 struct csio_dma_buf *dma_buf; in csio_scsi_init_ultptx_dsgl() local
353 dma_buf = (struct csio_dma_buf *)tmp; in csio_scsi_init_ultptx_dsgl()
355 sgl->addr0 = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
357 min(xfer_len, dma_buf->len)); in csio_scsi_init_ultptx_dsgl()
360 sge_pair->addr[1] = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
362 min(xfer_len, dma_buf->len)); in csio_scsi_init_ultptx_dsgl()
[all …]
csio_lnode.c
269 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_done()
302 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_rhba_cbfn()
374 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_rhba_cbfn()
410 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_dprt_cbfn()
481 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_dprt_cbfn()
511 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_dhba_cbfn()
532 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_dhba_cbfn()
570 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_start()
581 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_start()
1691 wr->rsp_dmalen = cpu_to_be32(io_req->dma_buf.len); in csio_ln_prep_ecwr()
[all …]
/drivers/gpu/drm/udl/
udl_gem.c
294 struct dma_buf *dma_buf) in udl_gem_prime_import() argument
302 attach = dma_buf_attach(dma_buf, dev->dev); in udl_gem_prime_import()
306 get_dma_buf(dma_buf); in udl_gem_prime_import()
314 ret = udl_prime_create(dev, dma_buf->size, sg, &uobj); in udl_gem_prime_import()
326 dma_buf_detach(dma_buf, attach); in udl_gem_prime_import()
327 dma_buf_put(dma_buf); in udl_gem_prime_import()
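udl_gem.c above is the canonical import path for device access: hold a reference with get_dma_buf(), attach the importing device, and map the attachment to obtain a scatter/gather table for DMA, unwinding with detach and put on failure. A sketch of that flow; dma_buf_map_attachment()/dma_buf_unmap_attachment() are not among the matched lines but are the standard counterparts in this API, and the wrapper itself is illustrative:

#include <linux/device.h>
#include <linux/dma-buf.h>
#include <linux/err.h>

/* Illustrative: map a dma-buf for DMA by 'dev', then tear everything down. */
static int example_dma_access(struct device *dev, struct dma_buf *dmabuf)
{
        struct dma_buf_attachment *attach;
        struct sg_table *sgt;
        int ret = 0;

        get_dma_buf(dmabuf);            /* hold a reference while attached */

        attach = dma_buf_attach(dmabuf, dev);
        if (IS_ERR(attach)) {
                ret = PTR_ERR(attach);
                goto err_put;
        }

        sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
        if (IS_ERR(sgt)) {
                ret = PTR_ERR(sgt);
                goto err_detach;
        }

        /* ... program the device with the pages described by sgt ... */

        dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL);
err_detach:
        dma_buf_detach(dmabuf, attach);
err_put:
        dma_buf_put(dmabuf);
        return ret;
}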
udl_drv.h
125 struct dma_buf *dma_buf);
/drivers/mtd/nand/
lpc32xx_mlc.c
198 uint8_t *dma_buf; member
439 uint8_t *dma_buf; in lpc32xx_read_page() local
443 dma_buf = buf; in lpc32xx_read_page()
446 dma_buf = host->dma_buf; in lpc32xx_read_page()
472 res = lpc32xx_xmit_dma(mtd, dma_buf + i * 512, 512, in lpc32xx_read_page()
491 memcpy(buf, dma_buf, mtd->writesize); in lpc32xx_read_page()
502 uint8_t *dma_buf = (uint8_t *)buf; in lpc32xx_write_page_lowlevel() local
507 dma_buf = host->dma_buf; in lpc32xx_write_page_lowlevel()
508 memcpy(dma_buf, buf, mtd->writesize); in lpc32xx_write_page_lowlevel()
517 res = lpc32xx_xmit_dma(mtd, dma_buf + i * 512, 512, in lpc32xx_write_page_lowlevel()
[all …]
/drivers/media/v4l2-core/
videobuf2-dma-contig.c
232 static int vb2_dc_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, in vb2_dc_dmabuf_ops_attach()
270 static void vb2_dc_dmabuf_ops_detach(struct dma_buf *dbuf, in vb2_dc_dmabuf_ops_detach()
336 static void vb2_dc_dmabuf_ops_release(struct dma_buf *dbuf) in vb2_dc_dmabuf_ops_release()
342 static void *vb2_dc_dmabuf_ops_kmap(struct dma_buf *dbuf, unsigned long pgnum) in vb2_dc_dmabuf_ops_kmap()
349 static void *vb2_dc_dmabuf_ops_vmap(struct dma_buf *dbuf) in vb2_dc_dmabuf_ops_vmap()
356 static int vb2_dc_dmabuf_ops_mmap(struct dma_buf *dbuf, in vb2_dc_dmabuf_ops_mmap()
396 static struct dma_buf *vb2_dc_get_dmabuf(void *buf_priv) in vb2_dc_get_dmabuf()
399 struct dma_buf *dbuf; in vb2_dc_get_dmabuf()
699 static void *vb2_dc_attach_dmabuf(void *alloc_ctx, struct dma_buf *dbuf, in vb2_dc_attach_dmabuf()
videobuf2-vmalloc.c
33 struct dma_buf *dbuf;
242 static void *vb2_vmalloc_attach_dmabuf(void *alloc_ctx, struct dma_buf *dbuf, in vb2_vmalloc_attach_dmabuf()
/drivers/net/ethernet/micrel/
ksz884x.c
989 struct ksz_dma_buf dma_buf; member
992 #define DMA_BUFFER(desc) ((struct ksz_dma_buf *)(&(desc)->dma_buf))
4451 static void free_dma_buf(struct dev_info *adapter, struct ksz_dma_buf *dma_buf, in free_dma_buf() argument
4454 pci_unmap_single(adapter->pdev, dma_buf->dma, dma_buf->len, direction); in free_dma_buf()
4455 dev_kfree_skb(dma_buf->skb); in free_dma_buf()
4456 dma_buf->skb = NULL; in free_dma_buf()
4457 dma_buf->dma = 0; in free_dma_buf()
4470 struct ksz_dma_buf *dma_buf; in ksz_init_rx_buffers() local
4477 dma_buf = DMA_BUFFER(desc); in ksz_init_rx_buffers()
4478 if (dma_buf->skb && dma_buf->len != adapter->mtu) in ksz_init_rx_buffers()
[all …]
/drivers/net/ethernet/sfc/
mcdi_mon.c
99 hwmon->dma_buf.dma_addr & 0xffffffff); in efx_mcdi_mon_update()
101 (u64)hwmon->dma_buf.dma_addr >> 32); in efx_mcdi_mon_update()
135 *entry = ((efx_dword_t *)hwmon->dma_buf.addr)[index]; in efx_mcdi_mon_get_entry()
265 rc = efx_nic_alloc_buffer(efx, &hwmon->dma_buf, in efx_mcdi_mon_probe()
413 efx_nic_free_buffer(efx, &hwmon->dma_buf); in efx_mcdi_mon_remove()
/drivers/spi/
spi-tegra114.c
598 u32 *dma_buf; in tegra_spi_init_dma_param() local
613 dma_buf = dma_alloc_coherent(tspi->dev, tspi->dma_buf_size, in tegra_spi_init_dma_param()
615 if (!dma_buf) { in tegra_spi_init_dma_param()
637 tspi->rx_dma_buf = dma_buf; in tegra_spi_init_dma_param()
641 tspi->tx_dma_buf = dma_buf; in tegra_spi_init_dma_param()
647 dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys); in tegra_spi_init_dma_param()
655 u32 *dma_buf; in tegra_spi_deinit_dma_param() local
660 dma_buf = tspi->rx_dma_buf; in tegra_spi_deinit_dma_param()
666 dma_buf = tspi->tx_dma_buf; in tegra_spi_deinit_dma_param()
675 dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys); in tegra_spi_deinit_dma_param()
spi-tegra20-slink.c
628 u32 *dma_buf; in tegra_slink_init_dma_param() local
643 dma_buf = dma_alloc_coherent(tspi->dev, tspi->dma_buf_size, in tegra_slink_init_dma_param()
645 if (!dma_buf) { in tegra_slink_init_dma_param()
667 tspi->rx_dma_buf = dma_buf; in tegra_slink_init_dma_param()
671 tspi->tx_dma_buf = dma_buf; in tegra_slink_init_dma_param()
677 dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys); in tegra_slink_init_dma_param()
685 u32 *dma_buf; in tegra_slink_deinit_dma_param() local
690 dma_buf = tspi->rx_dma_buf; in tegra_slink_deinit_dma_param()
696 dma_buf = tspi->tx_dma_buf; in tegra_slink_deinit_dma_param()
705 dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys); in tegra_slink_deinit_dma_param()
/drivers/parport/
parport_gsc.c
259 priv->dma_buf = 0; in parport_gsc_probe_port()
395 if (priv->dma_buf) in parport_remove_chip()
397 priv->dma_buf, in parport_remove_chip()
/drivers/media/pci/pluto2/
pluto2.c
125 u8 dma_buf[TS_DMA_BYTES]; member
245 pluto->dma_addr = pci_map_single(pluto->pdev, pluto->dma_buf, in pluto_dma_map()
308 while (pluto->dma_buf[i] == 0x47) in pluto_dma_end()
317 dvb_dmx_swfilter_packets(&pluto->demux, pluto->dma_buf, nbpackets); in pluto_dma_end()
321 memset(pluto->dma_buf, 0, nbpackets * 188); in pluto_dma_end()
/drivers/gpu/drm/vmwgfx/
vmwgfx_resource.c
488 struct vmw_dma_buffer *dma_buf; in vmw_dmabuf_alloc_ioctl() local
498 req->size, false, &handle, &dma_buf); in vmw_dmabuf_alloc_ioctl()
503 rep->map_handle = dma_buf->base.addr_space_offset; in vmw_dmabuf_alloc_ioctl()
507 vmw_dmabuf_unreference(&dma_buf); in vmw_dmabuf_alloc_ioctl()
555 struct vmw_dma_buffer *dma_buf) in vmw_user_dmabuf_reference() argument
559 if (dma_buf->base.destroy != vmw_user_dmabuf_destroy) in vmw_user_dmabuf_reference()
562 user_bo = container_of(dma_buf, struct vmw_user_dma_buffer, dma); in vmw_user_dmabuf_reference()
