/drivers/gpu/drm/
  drm_format_helper.c
    48  void (*xfrm_line)(void *dbuf, const void *sbuf, unsigned int npixels))    in __drm_fb_xfrm() argument
    91  void (*xfrm_line)(void *dbuf, const void *sbuf, unsigned int npixels))    in __drm_fb_xfrm_toio() argument
    101  void *dbuf;    in __drm_fb_xfrm_toio() local
    104  dbuf = kmalloc(dbuf_len, GFP_KERNEL);    in __drm_fb_xfrm_toio()
    106  dbuf = kmalloc(stmp_off + sbuf_len, GFP_KERNEL);    in __drm_fb_xfrm_toio()
    107  stmp = dbuf + stmp_off;    in __drm_fb_xfrm_toio()
    109  if (!dbuf)    in __drm_fb_xfrm_toio()
    121  xfrm_line(dbuf, sbuf, linepixels);    in __drm_fb_xfrm_toio()
    122  memcpy_toio(dst, dbuf, dbuf_len);    in __drm_fb_xfrm_toio()
    127  kfree(dbuf);    in __drm_fb_xfrm_toio()
    [all …]
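The __drm_fb_xfrm_toio() hits above outline a bounce-buffer blit: a scanline-sized buffer is allocated with kmalloc(), each line is converted into it by the xfrm_line callback, and the converted line is pushed to I/O memory with memcpy_toio(). A minimal sketch of that pattern; the function name blit_lines_toio() and its parameter list are illustrative, not taken from drm_format_helper.c:

    /* Sketch only: mirrors the loop structure above, names and parameters are made up. */
    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/slab.h>

    static int blit_lines_toio(void __iomem *dst, unsigned int dst_pitch,
                               const void *src, unsigned int src_pitch,
                               unsigned int linepixels, unsigned int lines,
                               unsigned int dst_cpp,
                               void (*xfrm_line)(void *dbuf, const void *sbuf,
                                                 unsigned int npixels))
    {
            size_t dbuf_len = (size_t)linepixels * dst_cpp;
            void *dbuf;
            unsigned int y;

            dbuf = kmalloc(dbuf_len, GFP_KERNEL);   /* one scanline of destination pixels */
            if (!dbuf)
                    return -ENOMEM;

            for (y = 0; y < lines; y++) {
                    xfrm_line(dbuf, src, linepixels);  /* convert one line into the bounce buffer */
                    memcpy_toio(dst, dbuf, dbuf_len);  /* then push it to I/O memory */
                    src += src_pitch;
                    dst += dst_pitch;
            }

            kfree(dbuf);
            return 0;
    }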
/drivers/media/common/videobuf2/
  videobuf2-vmalloc.c
    32  struct dma_buf *dbuf;    member
    210  static int vb2_vmalloc_dmabuf_ops_attach(struct dma_buf *dbuf,    in vb2_vmalloc_dmabuf_ops_attach() argument
    214  struct vb2_vmalloc_buf *buf = dbuf->priv;    in vb2_vmalloc_dmabuf_ops_attach()
    249  static void vb2_vmalloc_dmabuf_ops_detach(struct dma_buf *dbuf,    in vb2_vmalloc_dmabuf_ops_detach() argument
    302  static void vb2_vmalloc_dmabuf_ops_release(struct dma_buf *dbuf)    in vb2_vmalloc_dmabuf_ops_release() argument
    305  vb2_vmalloc_put(dbuf->priv);    in vb2_vmalloc_dmabuf_ops_release()
    308  static int vb2_vmalloc_dmabuf_ops_vmap(struct dma_buf *dbuf,    in vb2_vmalloc_dmabuf_ops_vmap() argument
    311  struct vb2_vmalloc_buf *buf = dbuf->priv;    in vb2_vmalloc_dmabuf_ops_vmap()
    318  static int vb2_vmalloc_dmabuf_ops_mmap(struct dma_buf *dbuf,    in vb2_vmalloc_dmabuf_ops_mmap() argument
    321  return vb2_vmalloc_mmap(dbuf->priv, vma);    in vb2_vmalloc_dmabuf_ops_mmap()
    [all …]
  videobuf2-dma-sg.c
    368  static int vb2_dma_sg_dmabuf_ops_attach(struct dma_buf *dbuf,    in vb2_dma_sg_dmabuf_ops_attach() argument
    375  struct vb2_dma_sg_buf *buf = dbuf->priv;    in vb2_dma_sg_dmabuf_ops_attach()
    406  static void vb2_dma_sg_dmabuf_ops_detach(struct dma_buf *dbuf,    in vb2_dma_sg_dmabuf_ops_detach() argument
    459  static void vb2_dma_sg_dmabuf_ops_release(struct dma_buf *dbuf)    in vb2_dma_sg_dmabuf_ops_release() argument
    462  vb2_dma_sg_put(dbuf->priv);    in vb2_dma_sg_dmabuf_ops_release()
    466  vb2_dma_sg_dmabuf_ops_begin_cpu_access(struct dma_buf *dbuf,    in vb2_dma_sg_dmabuf_ops_begin_cpu_access() argument
    469  struct vb2_dma_sg_buf *buf = dbuf->priv;    in vb2_dma_sg_dmabuf_ops_begin_cpu_access()
    477  vb2_dma_sg_dmabuf_ops_end_cpu_access(struct dma_buf *dbuf,    in vb2_dma_sg_dmabuf_ops_end_cpu_access() argument
    480  struct vb2_dma_sg_buf *buf = dbuf->priv;    in vb2_dma_sg_dmabuf_ops_end_cpu_access()
    487  static int vb2_dma_sg_dmabuf_ops_vmap(struct dma_buf *dbuf,    in vb2_dma_sg_dmabuf_ops_vmap() argument
    [all …]
  videobuf2-dma-contig.c
    317  static int vb2_dc_dmabuf_ops_attach(struct dma_buf *dbuf,    in vb2_dc_dmabuf_ops_attach() argument
    324  struct vb2_dc_buf *buf = dbuf->priv;    in vb2_dc_dmabuf_ops_attach()
    355  static void vb2_dc_dmabuf_ops_detach(struct dma_buf *dbuf,    in vb2_dc_dmabuf_ops_detach() argument
    420  static void vb2_dc_dmabuf_ops_release(struct dma_buf *dbuf)    in vb2_dc_dmabuf_ops_release() argument
    423  vb2_dc_put(dbuf->priv);    in vb2_dc_dmabuf_ops_release()
    427  vb2_dc_dmabuf_ops_begin_cpu_access(struct dma_buf *dbuf,    in vb2_dc_dmabuf_ops_begin_cpu_access() argument
    434  vb2_dc_dmabuf_ops_end_cpu_access(struct dma_buf *dbuf,    in vb2_dc_dmabuf_ops_end_cpu_access() argument
    440  static int vb2_dc_dmabuf_ops_vmap(struct dma_buf *dbuf, struct iosys_map *map)    in vb2_dc_dmabuf_ops_vmap() argument
    445  buf = dbuf->priv;    in vb2_dc_dmabuf_ops_vmap()
    455  static int vb2_dc_dmabuf_ops_mmap(struct dma_buf *dbuf,    in vb2_dc_dmabuf_ops_mmap() argument
    [all …]
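The three videobuf2 allocator backends listed above (vmalloc, dma-sg, dma-contig) share the same exporter-side convention: the allocator's own buffer object is stored in dma_buf::priv when the buffer is exported, and each dma_buf_ops callback recovers it from there. A reduced sketch of that convention, assuming a made-up my_buf type and showing only two callbacks; a complete exporter also needs attach/detach and map_dma_buf/unmap_dma_buf:

    #include <linux/dma-buf.h>
    #include <linux/iosys-map.h>

    struct my_buf {
            void *vaddr;                    /* kernel mapping of the backing memory */
    };

    static void my_buf_put(struct my_buf *buf)
    {
            /* allocator-specific teardown would go here */
    }

    static void my_dmabuf_ops_release(struct dma_buf *dbuf)
    {
            my_buf_put(dbuf->priv);         /* the buffer was stashed in priv at export time */
    }

    static int my_dmabuf_ops_vmap(struct dma_buf *dbuf, struct iosys_map *map)
    {
            struct my_buf *buf = dbuf->priv;

            iosys_map_set_vaddr(map, buf->vaddr);   /* hand out the existing kernel mapping */
            return 0;
    }

    static const struct dma_buf_ops my_dmabuf_ops = {
            .release = my_dmabuf_ops_release,
            .vmap    = my_dmabuf_ops_vmap,
            /* attach/detach and map_dma_buf/unmap_dma_buf omitted from this sketch */
    };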
  videobuf2-core.c
    301  dma_buf_put(p->dbuf);    in __vb2_plane_dmabuf_put()
    303  p->dbuf = NULL;    in __vb2_plane_dmabuf_put()
    1260  struct dma_buf *dbuf = dma_buf_get(planes[plane].m.fd);    in __prepare_dmabuf() local
    1262  if (IS_ERR_OR_NULL(dbuf)) {    in __prepare_dmabuf()
    1271  planes[plane].length = dbuf->size;    in __prepare_dmabuf()
    1277  dma_buf_put(dbuf);    in __prepare_dmabuf()
    1283  if (dbuf == vb->planes[plane].dbuf &&    in __prepare_dmabuf()
    1285  dma_buf_put(dbuf);    in __prepare_dmabuf()
    1308  dbuf,    in __prepare_dmabuf()
    1313  dma_buf_put(dbuf);    in __prepare_dmabuf()
    [all …]
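videobuf2-core.c's __prepare_dmabuf() is the importer side of the same interface: a userspace fd is resolved with dma_buf_get(), the buffer is size-checked against the format, and the reference is dropped on every early-exit path. A sketch of that flow under assumed names (prepare_plane_from_fd, min_length); the attach step is shown, the mapping step is only hinted at:

    #include <linux/device.h>
    #include <linux/dma-buf.h>
    #include <linux/err.h>
    #include <linux/errno.h>

    static int prepare_plane_from_fd(struct device *dev, int fd, unsigned int min_length)
    {
            struct dma_buf *dbuf = dma_buf_get(fd);         /* takes a reference on success */
            struct dma_buf_attachment *attach;

            if (IS_ERR_OR_NULL(dbuf))
                    return -EINVAL;

            if (dbuf->size < min_length) {                  /* buffer too small for the format */
                    dma_buf_put(dbuf);
                    return -EINVAL;
            }

            attach = dma_buf_attach(dbuf, dev);
            if (IS_ERR(attach)) {
                    dma_buf_put(dbuf);
                    return PTR_ERR(attach);
            }

            /* dma_buf_map_attachment() would follow when the buffer is actually queued */

            dma_buf_detach(dbuf, attach);
            dma_buf_put(dbuf);
            return 0;
    }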
/drivers/scsi/lpfc/
  lpfc_mem.c
    511  hbqbp->dbuf.virt = dma_pool_alloc(phba->lpfc_hbq_pool, GFP_KERNEL,    in lpfc_els_hbq_alloc()
    512  &hbqbp->dbuf.phys);    in lpfc_els_hbq_alloc()
    513  if (!hbqbp->dbuf.virt) {    in lpfc_els_hbq_alloc()
    536  dma_pool_free(phba->lpfc_hbq_pool, hbqbp->dbuf.virt, hbqbp->dbuf.phys);    in lpfc_els_hbq_free()
    569  dma_buf->dbuf.virt = dma_pool_alloc(phba->lpfc_drb_pool, GFP_KERNEL,    in lpfc_sli4_rb_alloc()
    570  &dma_buf->dbuf.phys);    in lpfc_sli4_rb_alloc()
    571  if (!dma_buf->dbuf.virt) {    in lpfc_sli4_rb_alloc()
    597  dma_pool_free(phba->lpfc_drb_pool, dmab->dbuf.virt, dmab->dbuf.phys);    in lpfc_sli4_rb_free()
    627  dma_buf->dbuf.virt = dma_pool_alloc(phba->lpfc_nvmet_drb_pool,    in lpfc_sli4_nvmet_alloc()
    628  GFP_KERNEL, &dma_buf->dbuf.phys);    in lpfc_sli4_nvmet_alloc()
    [all …]
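The lpfc allocators above all draw fixed-size DMA-coherent chunks from a dma_pool and record both the CPU address and the DMA handle in a small descriptor. A generic sketch of that pairing with placeholder names (my_dmabuf, my_rb_alloc); the pool itself would come from dma_pool_create():

    #include <linux/dmapool.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/types.h>

    struct my_dmabuf {
            void *virt;             /* CPU address of the chunk */
            dma_addr_t phys;        /* DMA handle the hardware will use */
    };

    static int my_rb_alloc(struct dma_pool *pool, struct my_dmabuf *buf)
    {
            buf->virt = dma_pool_alloc(pool, GFP_KERNEL, &buf->phys);
            if (!buf->virt)
                    return -ENOMEM;
            return 0;
    }

    static void my_rb_free(struct dma_pool *pool, struct my_dmabuf *buf)
    {
            dma_pool_free(pool, buf->virt, buf->phys);
    }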
  lpfc_sli.c
    2398  hbq_buf = container_of(dmabuf, struct hbq_dmabuf, dbuf);    in lpfc_sli_hbqbuf_free_all()
    2399  list_del(&hbq_buf->dbuf.list);    in lpfc_sli_hbqbuf_free_all()
    2446  dma_addr_t physaddr = hbq_buf->dbuf.phys;    in lpfc_sli_hbq_to_firmware_s3()
    2465  list_add_tail(&hbq_buf->dbuf.list, &hbqp->hbq_buffer_list);    in lpfc_sli_hbq_to_firmware_s3()
    2499  drqe.address_lo = putPaddrLow(hbq_buf->dbuf.phys);    in lpfc_sli_hbq_to_firmware_s4()
    2500  drqe.address_hi = putPaddrHigh(hbq_buf->dbuf.phys);    in lpfc_sli_hbq_to_firmware_s4()
    2505  list_add_tail(&hbq_buf->dbuf.list, &phba->hbqs[hbqno].hbq_buffer_list);    in lpfc_sli_hbq_to_firmware_s4()
    2557  list_add_tail(&hbq_buffer->dbuf.list, &hbq_buf_list);    in lpfc_sli_hbqbuf_fill_hbqs()
    2565  dbuf.list);    in lpfc_sli_hbqbuf_fill_hbqs()
    2580  dbuf.list);    in lpfc_sli_hbqbuf_fill_hbqs()
    [all …]
  lpfc.h
    163  struct lpfc_dmabuf dbuf;    member
    174  struct lpfc_dmabuf dbuf;    member
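lpfc.h embeds the generic struct lpfc_dmabuf as the dbuf member of its larger buffer types, and lpfc_sli.c (above) queues buffers by that member's list head, recovering the outer structure with container_of(). A simplified sketch of the pattern, assuming stand-in type names rather than the real lpfc structures:

    #include <linux/container_of.h>
    #include <linux/list.h>
    #include <linux/types.h>

    struct generic_dmabuf {
            struct list_head list;
            void *virt;
            dma_addr_t phys;
    };

    struct hbq_buffer {
            struct generic_dmabuf dbuf;     /* embedded member; its list_head sits on the queue */
            u32 tag;
    };

    static struct hbq_buffer *first_hbq_buffer(struct list_head *hbq_list)
    {
            struct generic_dmabuf *d;

            if (list_empty(hbq_list))
                    return NULL;

            d = list_first_entry(hbq_list, struct generic_dmabuf, list);
            return container_of(d, struct hbq_buffer, dbuf);        /* back to the outer struct */
    }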
/drivers/gpu/drm/xen/
  xen_drm_front.c
    44  struct xen_drm_front_dbuf *dbuf, u64 dbuf_cookie)    in dbuf_add_to_list() argument
    46  dbuf->dbuf_cookie = dbuf_cookie;    in dbuf_add_to_list()
    47  list_add(&dbuf->list, &front_info->dbuf_list);    in dbuf_add_to_list()
    163  struct xen_drm_front_dbuf *dbuf;    in xen_drm_front_dbuf_create() local
    173  dbuf = kzalloc(sizeof(*dbuf), GFP_KERNEL);    in xen_drm_front_dbuf_create()
    174  if (!dbuf)    in xen_drm_front_dbuf_create()
    177  dbuf_add_to_list(front_info, dbuf, dbuf_cookie);    in xen_drm_front_dbuf_create()
    183  buf_cfg.pgdir = &dbuf->shbuf;    in xen_drm_front_dbuf_create()
    195  xen_front_pgdir_shbuf_get_dir_start(&dbuf->shbuf);    in xen_drm_front_dbuf_create()
    215  ret = xen_front_pgdir_shbuf_map(&dbuf->shbuf);    in xen_drm_front_dbuf_create()
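dbuf_add_to_list() records a cookie on each display buffer and links it onto front_info->dbuf_list, so later requests can refer to a buffer by cookie alone. A sketch of the matching lookup, reduced to the two fields it needs; the structure layout and function name here are illustrative assumptions, not code copied from xen_drm_front.c:

    #include <linux/list.h>
    #include <linux/types.h>

    struct front_dbuf {
            struct list_head list;
            u64 dbuf_cookie;        /* identifier shared with the backend */
    };

    static struct front_dbuf *dbuf_get_by_cookie(struct list_head *dbuf_list, u64 dbuf_cookie)
    {
            struct front_dbuf *buf;

            list_for_each_entry(buf, dbuf_list, list)
                    if (buf->dbuf_cookie == dbuf_cookie)
                            return buf;

            return NULL;
    }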
/drivers/gpu/drm/i915/display/
  intel_display_device.c
    448  .dbuf.size = 896 - 4, /* 4 blocks for bypass path allocation */
    449  .dbuf.slice_mask = BIT(DBUF_S1),
    473  .dbuf.slice_mask = BIT(DBUF_S1), \
    497  .dbuf.size = 512 - 4, /* 4 blocks for bypass path allocation */
    504  .dbuf.size = 1024 - 4, /* 4 blocks for bypass path allocation */
    512  .dbuf.size = 2048, \
    513  .dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
    565  .dbuf.size = 2048, \
    566  .dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
    653  .dbuf.size = 4096, \
    [all …]
  skl_watermark.h
    67  …uf_state(intel_atomic_get_old_global_obj_state(state, &to_i915(state->base.dev)->display.dbuf.obj))
    69  …uf_state(intel_atomic_get_new_global_obj_state(state, &to_i915(state->base.dev)->display.dbuf.obj))
  intel_display_device.h
    105  } dbuf;    member
  skl_watermark.c
    510  return DISPLAY_INFO(i915)->dbuf.size /    in intel_dbuf_slice_size()
    511  hweight8(DISPLAY_INFO(i915)->dbuf.slice_mask);    in intel_dbuf_slice_size()
    530  WARN_ON(ddb->end > DISPLAY_INFO(i915)->dbuf.size);    in skl_ddb_entry_for_slices()
    2628  DISPLAY_INFO(i915)->dbuf.slice_mask,    in skl_compute_ddb()
    2996  to_intel_dbuf_state(i915->display.dbuf.obj.state);    in skl_wm_get_hw_state()
    3058  dbuf_state->enabled_slices = i915->display.dbuf.enabled_slices;    in skl_wm_get_hw_state()
    3064  to_intel_dbuf_state(i915->display.dbuf.obj.state);    in skl_dbuf_is_misconfigured()
    3163  hw_enabled_slices != i915->display.dbuf.enabled_slices)    in intel_wm_state_verify()
    3166  i915->display.dbuf.enabled_slices,    in intel_wm_state_verify()
    3452  dbuf_state = intel_atomic_get_global_obj_state(state, &i915->display.dbuf.obj);    in intel_atomic_get_dbuf_state()
    [all …]
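intel_dbuf_slice_size() divides the DBUF block count from the device info evenly across the slices enabled in the mask: with .dbuf.size = 2048 and slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), as in some of the intel_display_device.c entries above, each slice gets 1024 blocks. A standalone sketch of that arithmetic; the helper name is made up and a non-empty mask is assumed:

    #include <linux/bitops.h>
    #include <linux/types.h>

    static unsigned int dbuf_slice_size(unsigned int total_blocks, u8 slice_mask)
    {
            /* hweight8() counts the slices enabled in the mask */
            return total_blocks / hweight8(slice_mask);
    }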
  intel_display_core.h
    345  } dbuf;    member
  intel_display_power.c
    1081  u8 slice_mask = DISPLAY_INFO(dev_priv)->dbuf.slice_mask;    in gen9_dbuf_slices_update()
    1103  dev_priv->display.dbuf.enabled_slices = req_slices;    in gen9_dbuf_slices_update()
    1112  dev_priv->display.dbuf.enabled_slices =    in gen9_dbuf_enable()
    1115  slices_mask = BIT(DBUF_S1) | dev_priv->display.dbuf.enabled_slices;    in gen9_dbuf_enable()
  intel_modeset_setup.c
    160  to_intel_dbuf_state(i915->display.dbuf.obj.state);    in intel_crtc_disable_noatomic_complete()
    680  to_intel_dbuf_state(i915->display.dbuf.obj.state);    in intel_modeset_readout_hw_state()
/drivers/tty/serial/
  amba-pl011.c
    826  struct pl011_dmabuf *dbuf;    in pl011_dma_rx_trigger_dma() local
    832  dbuf = uap->dmarx.use_buf_b ?    in pl011_dma_rx_trigger_dma()
    834  desc = dmaengine_prep_slave_single(rxchan, dbuf->dma, dbuf->len,    in pl011_dma_rx_trigger_dma()
    874  struct pl011_dmabuf *dbuf = use_buf_b ?    in pl011_dma_rx_chars() local
    884  dmataken = dbuf->len - dmarx->last_residue;    in pl011_dma_rx_chars()
    898  dma_count = tty_insert_flip_string(port, dbuf->buf + dmataken,    in pl011_dma_rx_chars()
    909  dmarx->last_residue = dbuf->len;    in pl011_dma_rx_chars()
    944  struct pl011_dmabuf *dbuf = dmarx->use_buf_b ?    in pl011_dma_rx_irq() local
    967  pending = dbuf->len - state.residue;    in pl011_dma_rx_irq()
    994  struct pl011_dmabuf *dbuf = dmarx->use_buf_b ?    in pl011_dma_rx_callback() local
    [all …]
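The pl011 RX path works out how much data the DMA engine has already written from the reported residue: bytes received so far are the buffer length minus the residue, and the newly arrived chunk starts where the previous poll stopped (dmataken). A sketch of that bookkeeping with assumed names; it is not the driver's own helper:

    #include <linux/types.h>

    struct rx_progress {
            size_t buf_len;         /* size of the DMA receive buffer */
            size_t last_residue;    /* residue recorded at the previous poll */
    };

    static size_t rx_new_bytes(struct rx_progress *rx, size_t current_residue)
    {
            size_t taken_before = rx->buf_len - rx->last_residue;
            size_t taken_now = rx->buf_len - current_residue;

            rx->last_residue = current_residue;
            return taken_now - taken_before;        /* bytes to hand to the tty layer */
    }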
/drivers/base/firmware_loader/
  main.c
    114  void *dbuf,    in __allocate_fw_priv() argument
    122  if ((opt_flags & FW_OPT_PARTIAL) && !dbuf)    in __allocate_fw_priv()
    141  fw_priv->data = dbuf;    in __allocate_fw_priv()
    168  struct fw_priv **fw_priv, void *dbuf, size_t size,    in alloc_lookup_fw_priv() argument
    189  tmp = __allocate_fw_priv(fw_name, fwc, dbuf, size, offset, opt_flags);    in alloc_lookup_fw_priv()
    806  struct device *device, void *dbuf, size_t size,    in _request_firmware_prepare() argument
    820  if (firmware_request_builtin_buf(firmware, name, dbuf, size)) {    in _request_firmware_prepare()
    825  ret = alloc_lookup_fw_priv(name, &fw_cache, &fw_priv, dbuf, size,    in _request_firmware_prepare()
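The dbuf argument threaded through __allocate_fw_priv() and _request_firmware_prepare() is the caller-supplied destination buffer behind request_firmware_into_buf(), which loads a firmware image into memory the driver already owns instead of a loader-allocated buffer. A usage sketch; the firmware name and the wrapper function are illustrative:

    #include <linux/device.h>
    #include <linux/firmware.h>

    static int load_fw_into_prealloc(struct device *dev, void *dbuf, size_t size)
    {
            const struct firmware *fw;
            int ret;

            ret = request_firmware_into_buf(&fw, "example/fw.bin", dev, dbuf, size);
            if (ret)
                    return ret;

            /* fw->data now points into dbuf and fw->size is the loaded length */
            release_firmware(fw);
            return 0;
    }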
  firmware.h
    162  struct fw_priv **fw_priv, void *dbuf, size_t size,
/drivers/misc/mei/
  dma-ring.c
    131  unsigned char *dbuf = dev->dr_dscr[DMA_DSCR_DEVICE].vaddr;    in mei_dma_copy_from() local
    136  memcpy(buf, dbuf + b_offset, b_n);    in mei_dma_copy_from()
/drivers/crypto/bcm/
  util.c
    275  u8 dbuf[16];    in __dump_sg() local
    283  sg_copy_part_to_buf(sg, dbuf, count, idx);    in __dump_sg()
    286  4, 1, dbuf, count, false);    in __dump_sg()
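__dump_sg() copies a small window of scatterlist data into a 16-byte stack buffer and pretty-prints it. A sketch of the same idea built on generic helpers: sg_pcopy_to_buffer() stands in for the driver-local sg_copy_part_to_buf(), and the prefix string is made up:

    #include <linux/printk.h>
    #include <linux/scatterlist.h>

    static void dump_sg_window(struct scatterlist *sg, unsigned int nents,
                               unsigned int offset, unsigned int count)
    {
            u8 dbuf[16];

            if (count > sizeof(dbuf))
                    count = sizeof(dbuf);

            sg_pcopy_to_buffer(sg, nents, dbuf, count, offset);     /* pull the window out of the sg list */
            print_hex_dump(KERN_DEBUG, "sg: ", DUMP_PREFIX_OFFSET,
                           4, 1, dbuf, count, false);               /* 4-byte rows, no ASCII column */
    }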
/drivers/media/platform/nvidia/tegra-vde/
  v4l2.c
    177  get_dma_buf(vb->planes[i].dbuf);    in tegra_buf_init()
    179  err = tegra_vde_dmabuf_cache_map(vde, vb->planes[i].dbuf,    in tegra_buf_init()
    183  dma_buf_put(vb->planes[i].dbuf);    in tegra_buf_init()
/drivers/usb/gadget/udc/
  omap_udc.c
    2465  unsigned buf, unsigned maxp, int dbuf)    in omap_ep_setup() argument
    2506  dbuf = 1;    in omap_ep_setup()
    2513  dbuf = 0;    in omap_ep_setup()
    2531  if (dbuf && addr)    in omap_ep_setup()
    2541  name, addr, epn_rxtx, maxp, dbuf ? "x2" : "", buf);    in omap_ep_setup()
    2550  if (dbuf)    in omap_ep_setup()
    2561  ep->double_buf = dbuf;    in omap_ep_setup()
/drivers/scsi/qla2xxx/
  qla_sup.c
    1544  uint8_t *dbuf = vmalloc(RMW_BUFFER_SIZE);    in qla25xx_write_nvram_data() local
    1546  if (!dbuf)    in qla25xx_write_nvram_data()
    1548  ha->isp_ops->read_optrom(vha, dbuf, ha->flt_region_vpd_nvram << 2,    in qla25xx_write_nvram_data()
    1550  memcpy(dbuf + (naddr << 2), buf, bytes);    in qla25xx_write_nvram_data()
    1551  ha->isp_ops->write_optrom(vha, dbuf, ha->flt_region_vpd_nvram << 2,    in qla25xx_write_nvram_data()
    1553  vfree(dbuf);    in qla25xx_write_nvram_data()
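qla25xx_write_nvram_data() performs a read-modify-write of a flash region through a vmalloc'd staging buffer: read the whole region, patch the caller's bytes in at the right offset, write the region back, free the buffer. A generic sketch of that sequence, assuming read_region()/write_region() callbacks in place of the driver's optrom accessors:

    #include <linux/errno.h>
    #include <linux/string.h>
    #include <linux/types.h>
    #include <linux/vmalloc.h>

    static int rmw_flash_region(size_t region_size, size_t offset,
                                const void *data, size_t bytes,
                                int (*read_region)(void *buf, size_t len),
                                int (*write_region)(const void *buf, size_t len))
    {
            u8 *dbuf = vmalloc(region_size);        /* staging copy of the whole region */
            int ret;

            if (!dbuf)
                    return -ENOMEM;

            ret = read_region(dbuf, region_size);
            if (!ret) {
                    memcpy(dbuf + offset, data, bytes);     /* patch in the caller's bytes */
                    ret = write_region(dbuf, region_size);
            }

            vfree(dbuf);
            return ret;
    }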
/drivers/mmc/host/
  cavium.c
    299  u64 dbuf)    in do_read() argument
    307  writeq((0x10000 | (dbuf << 6)), host->base + MIO_EMM_BUF_IDX(host));    in do_read()