Lines matching refs: buf (struct vb2_dma_sg_buf, videobuf2-dma-sg.c)
60 static int vb2_dma_sg_alloc_compacted(struct vb2_dma_sg_buf *buf, in vb2_dma_sg_alloc_compacted() argument
64 unsigned long size = buf->size; in vb2_dma_sg_alloc_compacted()
85 __free_page(buf->pages[last_page]); in vb2_dma_sg_alloc_compacted()
93 buf->pages[last_page++] = &pages[i]; in vb2_dma_sg_alloc_compacted()
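The refs above come from the loop that fills buf->pages. A condensed sketch of that "compacted" allocation pattern, assuming size is already page-aligned; names other than those listed are illustrative:

#include <linux/gfp.h>
#include <linux/mm.h>

/*
 * Try the largest order that still fits, fall back to smaller orders on
 * allocation failure, then split_page() so every slot in pages[] holds an
 * order-0 page.  On failure, roll back everything allocated so far.
 */
static int sketch_alloc_compacted(struct page **pages, unsigned long size,
				  gfp_t gfp_flags)
{
	unsigned int last_page = 0;

	while (size > 0) {
		struct page *first = NULL;
		int order = get_order(size);
		int i;

		/* don't over-allocate past the requested size */
		if ((PAGE_SIZE << order) > size)
			order--;

		while (!first) {
			first = alloc_pages(GFP_KERNEL | __GFP_ZERO |
					    __GFP_NOWARN | gfp_flags, order);
			if (first)
				break;
			if (order == 0) {
				/* roll back what was already allocated */
				while (last_page--)
					__free_page(pages[last_page]);
				return -ENOMEM;
			}
			order--;
		}

		/* turn the high-order block into individual pages */
		split_page(first, order);
		for (i = 0; i < (1 << order); i++)
			pages[last_page++] = &first[i];

		size -= PAGE_SIZE << order;
	}
	return 0;
}
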
104 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_alloc() local
112 buf = kzalloc(sizeof *buf, GFP_KERNEL); in vb2_dma_sg_alloc()
113 if (!buf) in vb2_dma_sg_alloc()
116 buf->vaddr = NULL; in vb2_dma_sg_alloc()
117 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dma_sg_alloc()
118 buf->offset = 0; in vb2_dma_sg_alloc()
119 buf->size = size; in vb2_dma_sg_alloc()
121 buf->num_pages = size >> PAGE_SHIFT; in vb2_dma_sg_alloc()
122 buf->dma_sgt = &buf->sg_table; in vb2_dma_sg_alloc()
129 buf->pages = kvmalloc_array(buf->num_pages, sizeof(struct page *), in vb2_dma_sg_alloc()
131 if (!buf->pages) in vb2_dma_sg_alloc()
134 ret = vb2_dma_sg_alloc_compacted(buf, vb->vb2_queue->gfp_flags); in vb2_dma_sg_alloc()
138 ret = sg_alloc_table_from_pages(buf->dma_sgt, buf->pages, in vb2_dma_sg_alloc()
139 buf->num_pages, 0, size, GFP_KERNEL); in vb2_dma_sg_alloc()
144 buf->dev = get_device(dev); in vb2_dma_sg_alloc()
146 sgt = &buf->sg_table; in vb2_dma_sg_alloc()
151 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_alloc()
155 buf->handler.refcount = &buf->refcount; in vb2_dma_sg_alloc()
156 buf->handler.put = vb2_dma_sg_put; in vb2_dma_sg_alloc()
157 buf->handler.arg = buf; in vb2_dma_sg_alloc()
158 buf->vb = vb; in vb2_dma_sg_alloc()
160 refcount_set(&buf->refcount, 1); in vb2_dma_sg_alloc()
163 __func__, buf->num_pages); in vb2_dma_sg_alloc()
164 return buf; in vb2_dma_sg_alloc()
167 put_device(buf->dev); in vb2_dma_sg_alloc()
168 sg_free_table(buf->dma_sgt); in vb2_dma_sg_alloc()
170 num_pages = buf->num_pages; in vb2_dma_sg_alloc()
172 __free_page(buf->pages[num_pages]); in vb2_dma_sg_alloc()
174 kvfree(buf->pages); in vb2_dma_sg_alloc()
176 kfree(buf); in vb2_dma_sg_alloc()
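The MMAP alloc path above pairs sg_alloc_table_from_pages() with dma_map_sgtable(); CPU cache maintenance is skipped at map time because vb2 syncs explicitly in the prepare/finish hooks further down. A minimal sketch of that pairing, with the vb2 glue and error labels omitted and the sketch_* name purely illustrative:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Coalesce an array of pages into an sg_table and map it for DMA. */
static int sketch_map_pages(struct device *dev, struct sg_table *sgt,
			    struct page **pages, unsigned int num_pages,
			    unsigned long size, enum dma_data_direction dir)
{
	int ret;

	/* one scatterlist entry per physically contiguous run of pages */
	ret = sg_alloc_table_from_pages(sgt, pages, num_pages, 0, size,
					GFP_KERNEL);
	if (ret)
		return ret;

	/* defer CPU cache maintenance to the prepare/finish hooks */
	ret = dma_map_sgtable(dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret)
		sg_free_table(sgt);
	return ret;
}
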
182 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_put() local
183 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put()
184 int i = buf->num_pages; in vb2_dma_sg_put()
186 if (refcount_dec_and_test(&buf->refcount)) { in vb2_dma_sg_put()
188 buf->num_pages); in vb2_dma_sg_put()
189 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_put()
191 if (buf->vaddr) in vb2_dma_sg_put()
192 vm_unmap_ram(buf->vaddr, buf->num_pages); in vb2_dma_sg_put()
193 sg_free_table(buf->dma_sgt); in vb2_dma_sg_put()
195 __free_page(buf->pages[i]); in vb2_dma_sg_put()
196 kvfree(buf->pages); in vb2_dma_sg_put()
197 put_device(buf->dev); in vb2_dma_sg_put()
198 kfree(buf); in vb2_dma_sg_put()
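vb2_dma_sg_put() only tears the buffer down once refcount_dec_and_test() reports the last reference is gone, unwinding the alloc path in reverse. A sketch of that ordering; the vb2_dma_sg_buf fields are passed explicitly here to keep it self-contained:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

/* Reverse of the alloc path: unmap, drop the kernel mapping, free the
 * sg_table, release the pages, then drop the device reference.
 * Runs only after refcount_dec_and_test() on the buffer's refcount. */
static void sketch_teardown(struct device *dev, struct sg_table *sgt,
			    struct page **pages, unsigned int num_pages,
			    void *vaddr, enum dma_data_direction dir)
{
	int i = num_pages;

	dma_unmap_sgtable(dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (vaddr)
		vm_unmap_ram(vaddr, num_pages);
	sg_free_table(sgt);
	while (--i >= 0)
		__free_page(pages[i]);
	kvfree(pages);
	put_device(dev);	/* pairs with get_device() at alloc time */
}
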
204 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_prepare() local
205 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_prepare()
207 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_prepare()
212 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_finish() local
213 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_finish()
215 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_finish()
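Because the mapping is created with DMA_ATTR_SKIP_CPU_SYNC, cache maintenance happens explicitly in the prepare/finish pair above: sync for the device before it touches the buffer, and back for the CPU afterwards. A minimal sketch of that round trip for a capture buffer (device writes, so the mapping direction is DMA_FROM_DEVICE); sketch_* names are illustrative:

#include <linux/dma-mapping.h>

static void sketch_before_dma(struct device *dev, struct sg_table *sgt)
{
	/* hand the buffer to the device: flush/invalidate CPU caches */
	dma_sync_sgtable_for_device(dev, sgt, DMA_FROM_DEVICE);
}

static void sketch_after_dma(struct device *dev, struct sg_table *sgt)
{
	/* take it back: make the device's writes visible to the CPU */
	dma_sync_sgtable_for_cpu(dev, sgt, DMA_FROM_DEVICE);
}
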
221 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_get_userptr() local
228 buf = kzalloc(sizeof *buf, GFP_KERNEL); in vb2_dma_sg_get_userptr()
229 if (!buf) in vb2_dma_sg_get_userptr()
232 buf->vaddr = NULL; in vb2_dma_sg_get_userptr()
233 buf->dev = dev; in vb2_dma_sg_get_userptr()
234 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dma_sg_get_userptr()
235 buf->offset = vaddr & ~PAGE_MASK; in vb2_dma_sg_get_userptr()
236 buf->size = size; in vb2_dma_sg_get_userptr()
237 buf->dma_sgt = &buf->sg_table; in vb2_dma_sg_get_userptr()
238 buf->vb = vb; in vb2_dma_sg_get_userptr()
242 buf->vec = vec; in vb2_dma_sg_get_userptr()
244 buf->pages = frame_vector_pages(vec); in vb2_dma_sg_get_userptr()
245 if (IS_ERR(buf->pages)) in vb2_dma_sg_get_userptr()
247 buf->num_pages = frame_vector_count(vec); in vb2_dma_sg_get_userptr()
249 if (sg_alloc_table_from_pages(buf->dma_sgt, buf->pages, in vb2_dma_sg_get_userptr()
250 buf->num_pages, buf->offset, size, 0)) in vb2_dma_sg_get_userptr()
253 sgt = &buf->sg_table; in vb2_dma_sg_get_userptr()
258 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_get_userptr()
262 return buf; in vb2_dma_sg_get_userptr()
265 sg_free_table(&buf->sg_table); in vb2_dma_sg_get_userptr()
269 kfree(buf); in vb2_dma_sg_get_userptr()
279 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_put_userptr() local
280 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put_userptr()
281 int i = buf->num_pages; in vb2_dma_sg_put_userptr()
284 __func__, buf->num_pages); in vb2_dma_sg_put_userptr()
285 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, DMA_ATTR_SKIP_CPU_SYNC); in vb2_dma_sg_put_userptr()
286 if (buf->vaddr) in vb2_dma_sg_put_userptr()
287 vm_unmap_ram(buf->vaddr, buf->num_pages); in vb2_dma_sg_put_userptr()
288 sg_free_table(buf->dma_sgt); in vb2_dma_sg_put_userptr()
289 if (buf->dma_dir == DMA_FROM_DEVICE || in vb2_dma_sg_put_userptr()
290 buf->dma_dir == DMA_BIDIRECTIONAL) in vb2_dma_sg_put_userptr()
292 set_page_dirty_lock(buf->pages[i]); in vb2_dma_sg_put_userptr()
293 vb2_destroy_framevec(buf->vec); in vb2_dma_sg_put_userptr()
294 kfree(buf); in vb2_dma_sg_put_userptr()
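The USERPTR variant wraps pages pinned from user space (frame_vector_pages()/frame_vector_count() above) instead of allocating its own. On release, any page the device may have written (DMA_FROM_DEVICE or DMA_BIDIRECTIONAL) must be marked dirty before the frame vector is destroyed, so the data is not lost if the page is written back or reclaimed. A sketch of that release-side pattern around an illustrative helper:

#include <linux/dma-mapping.h>
#include <linux/mm.h>

static void sketch_dirty_user_pages(struct page **pages,
				    unsigned int num_pages,
				    enum dma_data_direction dir)
{
	int i = num_pages;

	/* only directions in which the device writes need the dirtying */
	if (dir != DMA_FROM_DEVICE && dir != DMA_BIDIRECTIONAL)
		return;

	while (--i >= 0)
		set_page_dirty_lock(pages[i]);
}
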
299 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_vaddr() local
303 BUG_ON(!buf); in vb2_dma_sg_vaddr()
305 if (!buf->vaddr) { in vb2_dma_sg_vaddr()
306 if (buf->db_attach) { in vb2_dma_sg_vaddr()
307 ret = dma_buf_vmap(buf->db_attach->dmabuf, &map); in vb2_dma_sg_vaddr()
308 buf->vaddr = ret ? NULL : map.vaddr; in vb2_dma_sg_vaddr()
310 buf->vaddr = vm_map_ram(buf->pages, buf->num_pages, -1); in vb2_dma_sg_vaddr()
315 return buf->vaddr ? buf->vaddr + buf->offset : NULL; in vb2_dma_sg_vaddr()
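vb2_dma_sg_vaddr() creates the kernel mapping lazily: an imported dma-buf is vmapped through its exporter, while locally allocated pages go through vm_map_ram(); buf->offset is added afterwards because a USERPTR buffer need not start on a page boundary. A sketch of that decision under the dma_buf_map API this kernel uses; the helper name is hypothetical:

#include <linux/dma-buf.h>
#include <linux/dma-buf-map.h>
#include <linux/vmalloc.h>

/* Lazily create a kernel mapping for either an imported dma-buf or a
 * locally allocated page array.  Returns NULL on failure. */
static void *sketch_get_vaddr(struct dma_buf_attachment *db_attach,
			      struct page **pages, unsigned int num_pages)
{
	struct dma_buf_map map;
	int ret;

	if (db_attach) {
		/* imported buffer: ask the exporter for a vmap */
		ret = dma_buf_vmap(db_attach->dmabuf, &map);
		return ret ? NULL : map.vaddr;
	}

	/* own pages: map them contiguously; -1 = no NUMA node preference */
	return vm_map_ram(pages, num_pages, -1);
}
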
320 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_num_users() local
322 return refcount_read(&buf->refcount); in vb2_dma_sg_num_users()
327 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_mmap() local
330 if (!buf) { in vb2_dma_sg_mmap()
335 err = vm_map_pages(vma, buf->pages, buf->num_pages); in vb2_dma_sg_mmap()
344 vma->vm_private_data = &buf->handler; in vb2_dma_sg_mmap()
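mmap() inserts the individual pages with vm_map_pages() and then points vm_private_data at the vb2_vmarea_handler filled in during allocation, so the shared vma open/close ops keep the buffer refcount in step with user-space mappings. A rough sketch of that wiring, assuming the vb2_common_vm_ops helper from videobuf2-memops; the sketch_mmap name is illustrative:

#include <linux/mm.h>
#include <media/videobuf2-memops.h>

static int sketch_mmap(struct vm_area_struct *vma, struct page **pages,
		       unsigned int num_pages,
		       struct vb2_vmarea_handler *handler)
{
	int err = vm_map_pages(vma, pages, num_pages);

	if (err)
		return err;

	/* vb2_common_vm_ops calls handler->put(handler->arg) on close */
	vma->vm_private_data = handler;
	vma->vm_ops = &vb2_common_vm_ops;
	vma->vm_ops->open(vma);		/* take the initial mapping reference */
	return 0;
}
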
368 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_attach() local
379 ret = sg_alloc_table(sgt, buf->dma_sgt->orig_nents, GFP_KERNEL); in vb2_dma_sg_dmabuf_ops_attach()
385 rd = buf->dma_sgt->sgl; in vb2_dma_sg_dmabuf_ops_attach()
471 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_begin_cpu_access() local
472 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_dmabuf_ops_begin_cpu_access()
474 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_dmabuf_ops_begin_cpu_access()
482 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_end_cpu_access() local
483 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_dmabuf_ops_end_cpu_access()
485 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_dmabuf_ops_end_cpu_access()
491 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_vmap() local
493 dma_buf_map_set_vaddr(map, buf->vaddr); in vb2_dma_sg_dmabuf_ops_vmap()
520 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_get_dmabuf() local
525 exp_info.size = buf->size; in vb2_dma_sg_get_dmabuf()
527 exp_info.priv = buf; in vb2_dma_sg_get_dmabuf()
529 if (WARN_ON(!buf->dma_sgt)) in vb2_dma_sg_get_dmabuf()
537 refcount_inc(&buf->refcount); in vb2_dma_sg_get_dmabuf()
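On the exporter side, each dma-buf attachment gets its own copy of the scatterlist (the rd/wr walk behind the refs above), so every importer can map the same pages independently on its own device. A condensed sketch of that duplication step:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Duplicate an existing sg_table for a new importer; the pages stay
 * shared, only the scatterlist bookkeeping is copied. */
static struct sg_table *sketch_dup_sgt(struct sg_table *orig)
{
	struct sg_table *sgt;
	struct scatterlist *rd, *wr;
	int i;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return NULL;

	if (sg_alloc_table(sgt, orig->orig_nents, GFP_KERNEL)) {
		kfree(sgt);
		return NULL;
	}

	rd = orig->sgl;
	wr = sgt->sgl;
	for (i = 0; i < sgt->orig_nents; ++i) {
		sg_set_page(wr, sg_page(rd), rd->length, rd->offset);
		rd = sg_next(rd);
		wr = sg_next(wr);
	}
	return sgt;
}
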
548 struct vb2_dma_sg_buf *buf = mem_priv; in vb2_dma_sg_map_dmabuf() local
551 if (WARN_ON(!buf->db_attach)) { in vb2_dma_sg_map_dmabuf()
556 if (WARN_ON(buf->dma_sgt)) { in vb2_dma_sg_map_dmabuf()
562 sgt = dma_buf_map_attachment(buf->db_attach, buf->dma_dir); in vb2_dma_sg_map_dmabuf()
568 buf->dma_sgt = sgt; in vb2_dma_sg_map_dmabuf()
569 buf->vaddr = NULL; in vb2_dma_sg_map_dmabuf()
576 struct vb2_dma_sg_buf *buf = mem_priv; in vb2_dma_sg_unmap_dmabuf() local
577 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_unmap_dmabuf()
578 struct dma_buf_map map = DMA_BUF_MAP_INIT_VADDR(buf->vaddr); in vb2_dma_sg_unmap_dmabuf()
580 if (WARN_ON(!buf->db_attach)) { in vb2_dma_sg_unmap_dmabuf()
590 if (buf->vaddr) { in vb2_dma_sg_unmap_dmabuf()
591 dma_buf_vunmap(buf->db_attach->dmabuf, &map); in vb2_dma_sg_unmap_dmabuf()
592 buf->vaddr = NULL; in vb2_dma_sg_unmap_dmabuf()
594 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); in vb2_dma_sg_unmap_dmabuf()
596 buf->dma_sgt = NULL; in vb2_dma_sg_unmap_dmabuf()
601 struct vb2_dma_sg_buf *buf = mem_priv; in vb2_dma_sg_detach_dmabuf() local
604 if (WARN_ON(buf->dma_sgt)) in vb2_dma_sg_detach_dmabuf()
605 vb2_dma_sg_unmap_dmabuf(buf); in vb2_dma_sg_detach_dmabuf()
608 dma_buf_detach(buf->db_attach->dmabuf, buf->db_attach); in vb2_dma_sg_detach_dmabuf()
609 kfree(buf); in vb2_dma_sg_detach_dmabuf()
615 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_attach_dmabuf() local
624 buf = kzalloc(sizeof(*buf), GFP_KERNEL); in vb2_dma_sg_attach_dmabuf()
625 if (!buf) in vb2_dma_sg_attach_dmabuf()
628 buf->dev = dev; in vb2_dma_sg_attach_dmabuf()
630 dba = dma_buf_attach(dbuf, buf->dev); in vb2_dma_sg_attach_dmabuf()
633 kfree(buf); in vb2_dma_sg_attach_dmabuf()
637 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dma_sg_attach_dmabuf()
638 buf->size = size; in vb2_dma_sg_attach_dmabuf()
639 buf->db_attach = dba; in vb2_dma_sg_attach_dmabuf()
640 buf->vb = vb; in vb2_dma_sg_attach_dmabuf()
642 return buf; in vb2_dma_sg_attach_dmabuf()
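On the importer side (the attach/map/unmap/detach refs above), the buffer never owns pages of its own: it holds a dma_buf_attachment, maps it to get an sg_table when the buffer is used, and unmaps and detaches in reverse on teardown. A compact sketch of that lifecycle with illustrative names and compressed error handling:

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

static int sketch_import_cycle(struct device *dev, struct dma_buf *dbuf,
			       enum dma_data_direction dir)
{
	struct dma_buf_attachment *dba;
	struct sg_table *sgt;

	dba = dma_buf_attach(dbuf, dev);		/* ATTACH */
	if (IS_ERR(dba))
		return PTR_ERR(dba);

	sgt = dma_buf_map_attachment(dba, dir);		/* MAP: exporter maps for us */
	if (IS_ERR(sgt)) {
		dma_buf_detach(dbuf, dba);
		return PTR_ERR(sgt);
	}

	/* ... program the device with the sgt entries ... */

	dma_buf_unmap_attachment(dba, sgt, dir);	/* UNMAP */
	dma_buf_detach(dbuf, dba);			/* DETACH */
	return 0;
}
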
647 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_cookie() local
649 return buf->dma_sgt; in vb2_dma_sg_cookie()