
Searched refs:dma_buf (Results 1 – 25 of 73) sorted by relevance


/drivers/gpu/drm/
drm_prime.c
65 struct dma_buf *dma_buf; member
78 struct dma_buf *dma_buf, uint32_t handle) in drm_prime_add_buf_handle() argument
87 get_dma_buf(dma_buf); in drm_prime_add_buf_handle()
88 member->dma_buf = dma_buf; in drm_prime_add_buf_handle()
98 if (dma_buf > pos->dma_buf) in drm_prime_add_buf_handle()
124 static struct dma_buf *drm_prime_lookup_buf_by_handle(struct drm_prime_file_private *prime_fpriv, in drm_prime_lookup_buf_by_handle()
135 return member->dma_buf; in drm_prime_lookup_buf_by_handle()
146 struct dma_buf *dma_buf, in drm_prime_lookup_buf_handle() argument
156 if (member->dma_buf == dma_buf) { in drm_prime_lookup_buf_handle()
159 } else if (member->dma_buf < dma_buf) { in drm_prime_lookup_buf_handle()
[all …]
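
The drm_prime.c excerpt above keeps its handle-to-dma_buf table in an rbtree ordered by raw pointer comparison (note the `dma_buf > pos->dma_buf` test at line 98 and `member->dma_buf < dma_buf` at 159). A minimal sketch of that lookup pattern, assuming a drm_prime_member with a dmabuf_rb node (layout and left/right orientation are illustrative, not copied from the file):

#include <linux/dma-buf.h>
#include <linux/rbtree.h>
#include <linux/types.h>

struct drm_prime_member {		/* illustrative layout */
	struct rb_node dmabuf_rb;
	struct dma_buf *dma_buf;
	uint32_t handle;
};

static struct drm_prime_member *
prime_lookup(struct rb_root *root, struct dma_buf *dma_buf)
{
	struct rb_node *node = root->rb_node;

	while (node) {
		struct drm_prime_member *m =
			rb_entry(node, struct drm_prime_member, dmabuf_rb);

		if (m->dma_buf == dma_buf)
			return m;
		/* tree is ordered by pointer value */
		node = dma_buf < m->dma_buf ? node->rb_left : node->rb_right;
	}
	return NULL;
}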
drm_fb_cma_helper.c
287 struct dma_buf *dma_buf; in drm_fb_cma_prepare_fb() local
293 dma_buf = drm_fb_cma_get_gem_obj(state->fb, 0)->base.dma_buf; in drm_fb_cma_prepare_fb()
294 if (dma_buf) { in drm_fb_cma_prepare_fb()
295 fence = reservation_object_get_excl_rcu(dma_buf->resv); in drm_fb_cma_prepare_fb()
/drivers/gpu/drm/i915/
i915_gem_dmabuf.c
34 static struct drm_i915_gem_object *dma_buf_to_obj(struct dma_buf *buf) in dma_buf_to_obj()
111 static void *i915_gem_dmabuf_vmap(struct dma_buf *dma_buf) in i915_gem_dmabuf_vmap() argument
113 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); in i915_gem_dmabuf_vmap()
128 static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr) in i915_gem_dmabuf_vunmap() argument
130 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); in i915_gem_dmabuf_vunmap()
138 static void *i915_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf, unsigned long page_num) in i915_gem_dmabuf_kmap_atomic() argument
143 static void i915_gem_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, unsigned long page_num, void *ad… in i915_gem_dmabuf_kunmap_atomic() argument
147 static void *i915_gem_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num) in i915_gem_dmabuf_kmap() argument
152 static void i915_gem_dmabuf_kunmap(struct dma_buf *dma_buf, unsigned long page_num, void *addr) in i915_gem_dmabuf_kunmap() argument
157 static int i915_gem_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma) in i915_gem_dmabuf_mmap() argument
[all …]
i915_gem_dmabuf.h
33 struct dma_buf *dma_buf; in i915_gem_object_get_dmabuf_resv() local
35 if (obj->base.dma_buf) in i915_gem_object_get_dmabuf_resv()
36 dma_buf = obj->base.dma_buf; in i915_gem_object_get_dmabuf_resv()
38 dma_buf = obj->base.import_attach->dmabuf; in i915_gem_object_get_dmabuf_resv()
42 return dma_buf->resv; in i915_gem_object_get_dmabuf_resv()
/drivers/gpu/drm/vmwgfx/
vmwgfx_prime.c
42 static int vmw_prime_map_attach(struct dma_buf *dma_buf, in vmw_prime_map_attach() argument
49 static void vmw_prime_map_detach(struct dma_buf *dma_buf, in vmw_prime_map_detach() argument
66 static void *vmw_prime_dmabuf_vmap(struct dma_buf *dma_buf) in vmw_prime_dmabuf_vmap() argument
71 static void vmw_prime_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr) in vmw_prime_dmabuf_vunmap() argument
75 static void *vmw_prime_dmabuf_kmap_atomic(struct dma_buf *dma_buf, in vmw_prime_dmabuf_kmap_atomic() argument
81 static void vmw_prime_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, in vmw_prime_dmabuf_kunmap_atomic() argument
86 static void *vmw_prime_dmabuf_kmap(struct dma_buf *dma_buf, in vmw_prime_dmabuf_kmap() argument
92 static void vmw_prime_dmabuf_kunmap(struct dma_buf *dma_buf, in vmw_prime_dmabuf_kunmap() argument
98 static int vmw_prime_dmabuf_mmap(struct dma_buf *dma_buf, in vmw_prime_dmabuf_mmap() argument
vmwgfx_resource.c
634 struct vmw_dma_buffer *dma_buf; in vmw_user_dmabuf_synccpu_ioctl() local
650 ret = vmw_user_dmabuf_lookup(tfile, arg->handle, &dma_buf, in vmw_user_dmabuf_synccpu_ioctl()
655 user_bo = container_of(dma_buf, struct vmw_user_dma_buffer, in vmw_user_dmabuf_synccpu_ioctl()
658 vmw_dmabuf_unreference(&dma_buf); in vmw_user_dmabuf_synccpu_ioctl()
692 struct vmw_dma_buffer *dma_buf; in vmw_dmabuf_alloc_ioctl() local
701 req->size, false, &handle, &dma_buf, in vmw_dmabuf_alloc_ioctl()
707 rep->map_handle = drm_vma_node_offset_addr(&dma_buf->base.vma_node); in vmw_dmabuf_alloc_ioctl()
711 vmw_dmabuf_unreference(&dma_buf); in vmw_dmabuf_alloc_ioctl()
764 struct vmw_dma_buffer *dma_buf, in vmw_user_dmabuf_reference() argument
769 if (dma_buf->base.destroy != vmw_user_dmabuf_destroy) in vmw_user_dmabuf_reference()
[all …]
/drivers/staging/android/ion/
ion_test.c
40 struct dma_buf *dma_buf; member
44 static int ion_handle_test_dma(struct device *dev, struct dma_buf *dma_buf, in ion_handle_test_dma() argument
56 attach = dma_buf_attach(dma_buf, dev); in ion_handle_test_dma()
97 dma_buf_detach(dma_buf, attach); in ion_handle_test_dma()
101 static int ion_handle_test_kernel(struct dma_buf *dma_buf, void __user *ptr, in ion_handle_test_kernel() argument
110 if (offset > dma_buf->size || size > dma_buf->size - offset) in ion_handle_test_kernel()
113 ret = dma_buf_begin_cpu_access(dma_buf, dir); in ion_handle_test_kernel()
119 void *vaddr = dma_buf_kmap(dma_buf, page_offset); in ion_handle_test_kernel()
131 dma_buf_kunmap(dma_buf, page_offset, vaddr); in ion_handle_test_kernel()
143 dma_buf_end_cpu_access(dma_buf, dir); in ion_handle_test_kernel()
[all …]
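
ion_handle_test_kernel() above brackets its page-by-page copies with dma_buf_begin_cpu_access()/dma_buf_end_cpu_access() and maps one page at a time with dma_buf_kmap(). A minimal importer-side sketch of the same sequence against the kernel API of this era (read_first_page and its out buffer are hypothetical names):

#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Copy page 0 of a dma-buf for CPU inspection, bracketing the
 * access exactly as ion_handle_test_kernel() does above. */
static int read_first_page(struct dma_buf *dmabuf, void *out)
{
	void *vaddr;
	int ret;

	ret = dma_buf_begin_cpu_access(dmabuf, DMA_FROM_DEVICE);
	if (ret)
		return ret;

	vaddr = dma_buf_kmap(dmabuf, 0);	/* takes a page index */
	if (vaddr) {
		memcpy(out, vaddr, PAGE_SIZE);
		dma_buf_kunmap(dmabuf, 0, vaddr);
	} else {
		ret = -ENOMEM;
	}

	dma_buf_end_cpu_access(dmabuf, DMA_FROM_DEVICE);
	return ret;
}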
/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c
75 static void omap_gem_dmabuf_release(struct dma_buf *buffer) in omap_gem_dmabuf_release()
85 static int omap_gem_dmabuf_begin_cpu_access(struct dma_buf *buffer, in omap_gem_dmabuf_begin_cpu_access()
100 static int omap_gem_dmabuf_end_cpu_access(struct dma_buf *buffer, in omap_gem_dmabuf_end_cpu_access()
109 static void *omap_gem_dmabuf_kmap_atomic(struct dma_buf *buffer, in omap_gem_dmabuf_kmap_atomic()
119 static void omap_gem_dmabuf_kunmap_atomic(struct dma_buf *buffer, in omap_gem_dmabuf_kunmap_atomic()
125 static void *omap_gem_dmabuf_kmap(struct dma_buf *buffer, in omap_gem_dmabuf_kmap()
135 static void omap_gem_dmabuf_kunmap(struct dma_buf *buffer, in omap_gem_dmabuf_kunmap()
144 static int omap_gem_dmabuf_mmap(struct dma_buf *buffer, in omap_gem_dmabuf_mmap()
170 struct dma_buf *omap_gem_prime_export(struct drm_device *dev, in omap_gem_prime_export()
188 struct dma_buf *dma_buf) in omap_gem_prime_import() argument
[all …]
/drivers/gpu/drm/udl/
udl_dmabuf.c
31 static int udl_attach_dma_buf(struct dma_buf *dmabuf, in udl_attach_dma_buf()
50 static void udl_detach_dma_buf(struct dma_buf *dmabuf, in udl_detach_dma_buf()
153 static void *udl_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num) in udl_dmabuf_kmap() argument
160 static void *udl_dmabuf_kmap_atomic(struct dma_buf *dma_buf, in udl_dmabuf_kmap_atomic() argument
168 static void udl_dmabuf_kunmap(struct dma_buf *dma_buf, in udl_dmabuf_kunmap() argument
174 static void udl_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, in udl_dmabuf_kunmap_atomic() argument
181 static int udl_dmabuf_mmap(struct dma_buf *dma_buf, in udl_dmabuf_mmap() argument
202 struct dma_buf *udl_gem_prime_export(struct drm_device *dev, in udl_gem_prime_export()
244 struct dma_buf *dma_buf) in udl_gem_prime_import() argument
253 attach = dma_buf_attach(dma_buf, dev->dev); in udl_gem_prime_import()
[all …]
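
udl_gem_prime_import() above starts with dma_buf_attach() (line 253); a typical importer then calls dma_buf_map_attachment() to obtain a device-usable sg_table, and unwinds in reverse order when done. A sketch of that pattern under those assumptions (my_import_for_dma is illustrative, not udl code):

#include <linux/dma-buf.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int my_import_for_dma(struct device *dev, struct dma_buf *dmabuf)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;

	attach = dma_buf_attach(dmabuf, dev);
	if (IS_ERR(attach))
		return PTR_ERR(attach);

	sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(sgt)) {
		dma_buf_detach(dmabuf, attach);
		return PTR_ERR(sgt);
	}

	/* ... program the device with sgt's DMA addresses ... */

	dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL);
	dma_buf_detach(dmabuf, attach);
	return 0;
}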
udl_drv.h
127 struct dma_buf *udl_gem_prime_export(struct drm_device *dev,
130 struct dma_buf *dma_buf);
/drivers/gpu/drm/ttm/
ttm_object.c
95 void (*dmabuf_release)(struct dma_buf *dma_buf);
130 static void ttm_prime_dmabuf_release(struct dma_buf *dma_buf);
520 tdev->dma_buf_size = ttm_round_pot(sizeof(struct dma_buf)) + in ttm_object_device_init()
557 static bool __must_check get_dma_buf_unless_doomed(struct dma_buf *dmabuf) in get_dma_buf_unless_doomed()
579 BUG_ON(prime->dma_buf != NULL); in ttm_prime_refcount_release()
595 static void ttm_prime_dmabuf_release(struct dma_buf *dma_buf) in ttm_prime_dmabuf_release() argument
598 (struct ttm_prime_object *) dma_buf->priv; in ttm_prime_dmabuf_release()
603 tdev->dmabuf_release(dma_buf); in ttm_prime_dmabuf_release()
605 if (prime->dma_buf == dma_buf) in ttm_prime_dmabuf_release()
606 prime->dma_buf = NULL; in ttm_prime_dmabuf_release()
[all …]
/drivers/dma-buf/
dma-buf.c
51 struct dma_buf *dmabuf; in dma_buf_release()
86 struct dma_buf *dmabuf; in dma_buf_mmap_internal()
103 struct dma_buf *dmabuf; in dma_buf_llseek()
140 struct dma_buf *dmabuf; in dma_buf_poll()
260 struct dma_buf *dmabuf; in dma_buf_ioctl()
334 struct dma_buf *dma_buf_export(const struct dma_buf_export_info *exp_info) in dma_buf_export()
336 struct dma_buf *dmabuf; in dma_buf_export()
339 size_t alloc_size = sizeof(struct dma_buf); in dma_buf_export()
417 int dma_buf_fd(struct dma_buf *dmabuf, int flags) in dma_buf_fd()
442 struct dma_buf *dma_buf_get(int fd) in dma_buf_get()
[all …]
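
The core entry points excerpted above — dma_buf_export() at line 334, dma_buf_fd() at 417, dma_buf_get() at 442 — form the exporter's path from a driver buffer to a userspace file descriptor. A sketch of that path (my_dmabuf_ops and struct my_obj are assumed driver-local names):

#include <linux/dma-buf.h>
#include <linux/err.h>
#include <linux/fcntl.h>

extern const struct dma_buf_ops my_dmabuf_ops;	/* assumed driver ops */
struct my_obj { size_t size; };			/* illustrative */

static int my_export_fd(struct my_obj *obj)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);
	struct dma_buf *dmabuf;
	int fd;

	exp_info.ops = &my_dmabuf_ops;
	exp_info.size = obj->size;
	exp_info.flags = O_RDWR;
	exp_info.priv = obj;

	dmabuf = dma_buf_export(&exp_info);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	fd = dma_buf_fd(dmabuf, O_CLOEXEC);
	if (fd < 0)
		dma_buf_put(dmabuf);	/* drop the export reference */
	return fd;
}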
/drivers/scsi/csiostor/
csio_scsi.c
206 struct csio_dma_buf *dma_buf; in csio_scsi_init_cmd_wr() local
222 dma_buf = &req->dma_buf; in csio_scsi_init_cmd_wr()
225 wr->rsp_dmalen = cpu_to_be32(dma_buf->len); in csio_scsi_init_cmd_wr()
226 wr->rsp_dmaaddr = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_cmd_wr()
298 struct csio_dma_buf *dma_buf; in csio_scsi_init_ultptx_dsgl() local
329 dma_buf = (struct csio_dma_buf *)tmp; in csio_scsi_init_ultptx_dsgl()
331 sgl->addr0 = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
333 min(xfer_len, dma_buf->len)); in csio_scsi_init_ultptx_dsgl()
336 sge_pair->addr[1] = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
338 min(xfer_len, dma_buf->len)); in csio_scsi_init_ultptx_dsgl()
[all …]
csio_lnode.c
269 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_done()
302 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_rhba_cbfn()
374 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_rhba_cbfn()
410 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_dprt_cbfn()
481 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_dprt_cbfn()
511 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_dhba_cbfn()
532 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_dhba_cbfn()
570 cmd = fdmi_req->dma_buf.vaddr; in csio_ln_fdmi_start()
581 FCOE_CT, &fdmi_req->dma_buf, len)) { in csio_ln_fdmi_start()
1691 wr->rsp_dmalen = cpu_to_be32(io_req->dma_buf.len); in csio_ln_prep_ecwr()
[all …]
/drivers/scsi/lpfc/
lpfc_mem.c
501 struct hbq_dmabuf *dma_buf; in lpfc_sli4_rb_alloc() local
503 dma_buf = kzalloc(sizeof(struct hbq_dmabuf), GFP_KERNEL); in lpfc_sli4_rb_alloc()
504 if (!dma_buf) in lpfc_sli4_rb_alloc()
507 dma_buf->hbuf.virt = pci_pool_alloc(phba->lpfc_hrb_pool, GFP_KERNEL, in lpfc_sli4_rb_alloc()
508 &dma_buf->hbuf.phys); in lpfc_sli4_rb_alloc()
509 if (!dma_buf->hbuf.virt) { in lpfc_sli4_rb_alloc()
510 kfree(dma_buf); in lpfc_sli4_rb_alloc()
513 dma_buf->dbuf.virt = pci_pool_alloc(phba->lpfc_drb_pool, GFP_KERNEL, in lpfc_sli4_rb_alloc()
514 &dma_buf->dbuf.phys); in lpfc_sli4_rb_alloc()
515 if (!dma_buf->dbuf.virt) { in lpfc_sli4_rb_alloc()
[all …]
/drivers/media/v4l2-core/
videobuf2-vmalloc.c
31 struct dma_buf *dbuf;
210 static int vb2_vmalloc_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, in vb2_vmalloc_dmabuf_ops_attach()
249 static void vb2_vmalloc_dmabuf_ops_detach(struct dma_buf *dbuf, in vb2_vmalloc_dmabuf_ops_detach()
315 static void vb2_vmalloc_dmabuf_ops_release(struct dma_buf *dbuf) in vb2_vmalloc_dmabuf_ops_release()
321 static void *vb2_vmalloc_dmabuf_ops_kmap(struct dma_buf *dbuf, unsigned long pgnum) in vb2_vmalloc_dmabuf_ops_kmap()
328 static void *vb2_vmalloc_dmabuf_ops_vmap(struct dma_buf *dbuf) in vb2_vmalloc_dmabuf_ops_vmap()
335 static int vb2_vmalloc_dmabuf_ops_mmap(struct dma_buf *dbuf, in vb2_vmalloc_dmabuf_ops_mmap()
353 static struct dma_buf *vb2_vmalloc_get_dmabuf(void *buf_priv, unsigned long flags) in vb2_vmalloc_get_dmabuf()
356 struct dma_buf *dbuf; in vb2_vmalloc_get_dmabuf()
410 static void *vb2_vmalloc_attach_dmabuf(struct device *dev, struct dma_buf *dbuf, in vb2_vmalloc_attach_dmabuf()
videobuf2-dma-sg.c
372 static int vb2_dma_sg_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, in vb2_dma_sg_dmabuf_ops_attach()
410 static void vb2_dma_sg_dmabuf_ops_detach(struct dma_buf *dbuf, in vb2_dma_sg_dmabuf_ops_detach()
476 static void vb2_dma_sg_dmabuf_ops_release(struct dma_buf *dbuf) in vb2_dma_sg_dmabuf_ops_release()
482 static void *vb2_dma_sg_dmabuf_ops_kmap(struct dma_buf *dbuf, unsigned long pgnum) in vb2_dma_sg_dmabuf_ops_kmap()
489 static void *vb2_dma_sg_dmabuf_ops_vmap(struct dma_buf *dbuf) in vb2_dma_sg_dmabuf_ops_vmap()
496 static int vb2_dma_sg_dmabuf_ops_mmap(struct dma_buf *dbuf, in vb2_dma_sg_dmabuf_ops_mmap()
514 static struct dma_buf *vb2_dma_sg_get_dmabuf(void *buf_priv, unsigned long flags) in vb2_dma_sg_get_dmabuf()
517 struct dma_buf *dbuf; in vb2_dma_sg_get_dmabuf()
607 static void *vb2_dma_sg_attach_dmabuf(struct device *dev, struct dma_buf *dbuf, in vb2_dma_sg_attach_dmabuf()
videobuf2-dma-contig.c
224 static int vb2_dc_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, in vb2_dc_dmabuf_ops_attach()
262 static void vb2_dc_dmabuf_ops_detach(struct dma_buf *dbuf, in vb2_dc_dmabuf_ops_detach()
328 static void vb2_dc_dmabuf_ops_release(struct dma_buf *dbuf) in vb2_dc_dmabuf_ops_release()
334 static void *vb2_dc_dmabuf_ops_kmap(struct dma_buf *dbuf, unsigned long pgnum) in vb2_dc_dmabuf_ops_kmap()
341 static void *vb2_dc_dmabuf_ops_vmap(struct dma_buf *dbuf) in vb2_dc_dmabuf_ops_vmap()
348 static int vb2_dc_dmabuf_ops_mmap(struct dma_buf *dbuf, in vb2_dc_dmabuf_ops_mmap()
388 static struct dma_buf *vb2_dc_get_dmabuf(void *buf_priv, unsigned long flags) in vb2_dc_get_dmabuf()
391 struct dma_buf *dbuf; in vb2_dc_get_dmabuf()
673 static void *vb2_dc_attach_dmabuf(struct device *dev, struct dma_buf *dbuf, in vb2_dc_attach_dmabuf()
/drivers/mtd/nand/
lpc32xx_mlc.c
211 uint8_t *dma_buf; member
452 uint8_t *dma_buf; in lpc32xx_read_page() local
456 dma_buf = buf; in lpc32xx_read_page()
459 dma_buf = host->dma_buf; in lpc32xx_read_page()
485 res = lpc32xx_xmit_dma(mtd, dma_buf + i * 512, 512, in lpc32xx_read_page()
504 memcpy(buf, dma_buf, mtd->writesize); in lpc32xx_read_page()
516 uint8_t *dma_buf = (uint8_t *)buf; in lpc32xx_write_page_lowlevel() local
521 dma_buf = host->dma_buf; in lpc32xx_write_page_lowlevel()
522 memcpy(dma_buf, buf, mtd->writesize); in lpc32xx_write_page_lowlevel()
531 res = lpc32xx_xmit_dma(mtd, dma_buf + i * 512, 512, in lpc32xx_write_page_lowlevel()
[all …]
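
lpc32xx_read_page() and lpc32xx_write_page_lowlevel() above fall back to a driver-owned bounce buffer (host->dma_buf) when the caller's buffer is not safe to DMA into, copying the payload across afterwards. A generic sketch of that bounce pattern (my_host and my_xmit_dma are hypothetical stand-ins for the driver's own helpers):

#include <linux/dmaengine.h>
#include <linux/mm.h>
#include <linux/string.h>
#include <linux/types.h>

struct my_host {			/* illustrative */
	uint8_t *dma_buf;		/* pre-allocated DMA-safe buffer */
};

/* Assumed DMA helper in the style of lpc32xx_xmit_dma(). */
int my_xmit_dma(struct my_host *host, void *buf, size_t len,
		enum dma_transfer_direction dir);

static int bounce_read(struct my_host *host, uint8_t *buf, size_t len)
{
	/* DMA directly only if the destination is DMA-capable */
	uint8_t *dma_buf = virt_addr_valid(buf) ? buf : host->dma_buf;
	int res;

	res = my_xmit_dma(host, dma_buf, len, DMA_DEV_TO_MEM);
	if (res)
		return res;

	if (dma_buf != buf)		/* bounced: copy out to caller */
		memcpy(buf, dma_buf, len);
	return 0;
}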
/drivers/gpu/drm/tegra/
gem.c
301 struct dma_buf *buf) in tegra_bo_import()
548 static void tegra_gem_prime_release(struct dma_buf *buf) in tegra_gem_prime_release()
553 static void *tegra_gem_prime_kmap_atomic(struct dma_buf *buf, in tegra_gem_prime_kmap_atomic()
559 static void tegra_gem_prime_kunmap_atomic(struct dma_buf *buf, in tegra_gem_prime_kunmap_atomic()
565 static void *tegra_gem_prime_kmap(struct dma_buf *buf, unsigned long page) in tegra_gem_prime_kmap()
570 static void tegra_gem_prime_kunmap(struct dma_buf *buf, unsigned long page, in tegra_gem_prime_kunmap()
575 static int tegra_gem_prime_mmap(struct dma_buf *buf, struct vm_area_struct *vma) in tegra_gem_prime_mmap()
580 static void *tegra_gem_prime_vmap(struct dma_buf *buf) in tegra_gem_prime_vmap()
588 static void tegra_gem_prime_vunmap(struct dma_buf *buf, void *vaddr) in tegra_gem_prime_vunmap()
605 struct dma_buf *tegra_gem_prime_export(struct drm_device *drm, in tegra_gem_prime_export()
[all …]
/drivers/net/wireless/mediatek/mt7601u/
mcu.c
291 const struct mt7601u_dma_buf *dma_buf, in __mt7601u_dma_fw() argument
295 struct mt7601u_dma_buf buf = *dma_buf; /* we need to fake length */ in __mt7601u_dma_fw()
343 mt7601u_dma_fw(struct mt7601u_dev *dev, struct mt7601u_dma_buf *dma_buf, in mt7601u_dma_fw() argument
352 ret = __mt7601u_dma_fw(dev, dma_buf, data, n, dst_addr); in mt7601u_dma_fw()
359 return mt7601u_dma_fw(dev, dma_buf, data + n, len - n, dst_addr + n); in mt7601u_dma_fw()
365 struct mt7601u_dma_buf dma_buf; in mt7601u_upload_firmware() local
373 if (mt7601u_usb_alloc_buf(dev, MCU_FW_URB_SIZE, &dma_buf)) { in mt7601u_upload_firmware()
381 ret = mt7601u_dma_fw(dev, &dma_buf, fw->ilm, ilm_len, sizeof(fw->ivb)); in mt7601u_upload_firmware()
387 ret = mt7601u_dma_fw(dev, &dma_buf, fw->ilm + ilm_len, in mt7601u_upload_firmware()
408 mt7601u_usb_free_buf(dev, &dma_buf); in mt7601u_upload_firmware()
/drivers/gpu/drm/vgem/
vgem_fence.c
118 struct dma_buf *dmabuf; in attach_dmabuf()
120 if (obj->dma_buf) in attach_dmabuf()
127 obj->dma_buf = dmabuf; in attach_dmabuf()
184 resv = obj->dma_buf->resv; in vgem_fence_attach_ioctl()
/drivers/gpu/drm/mediatek/
mtk_drm_fb.c
114 if (!gem || !gem->dma_buf || !gem->dma_buf->resv) in mtk_fb_wait()
117 resv = gem->dma_buf->resv; in mtk_fb_wait()
/drivers/gpu/drm/imx/
imx-drm-core.c
160 struct dma_buf *dma_buf; in imx_drm_atomic_commit() local
169 dma_buf = drm_fb_cma_get_gem_obj(plane_state->fb, in imx_drm_atomic_commit()
170 0)->base.dma_buf; in imx_drm_atomic_commit()
171 if (!dma_buf) in imx_drm_atomic_commit()
174 reservation_object_get_excl_rcu(dma_buf->resv); in imx_drm_atomic_commit()
/drivers/net/ethernet/micrel/
ksz884x.c
989 struct ksz_dma_buf dma_buf; member
992 #define DMA_BUFFER(desc) ((struct ksz_dma_buf *)(&(desc)->dma_buf))
4442 static void free_dma_buf(struct dev_info *adapter, struct ksz_dma_buf *dma_buf, in free_dma_buf() argument
4445 pci_unmap_single(adapter->pdev, dma_buf->dma, dma_buf->len, direction); in free_dma_buf()
4446 dev_kfree_skb(dma_buf->skb); in free_dma_buf()
4447 dma_buf->skb = NULL; in free_dma_buf()
4448 dma_buf->dma = 0; in free_dma_buf()
4461 struct ksz_dma_buf *dma_buf; in ksz_init_rx_buffers() local
4468 dma_buf = DMA_BUFFER(desc); in ksz_init_rx_buffers()
4469 if (dma_buf->skb && dma_buf->len != adapter->mtu) in ksz_init_rx_buffers()
[all …]
