Lines Matching refs:dma
141 return &mem->dma; in videobuf_to_dma()
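
These references all come from videobuf-dma-sg.c, the scatter-gather backend of the old videobuf framework. videobuf_to_dma() (line 141) is the accessor a driver uses to reach the struct videobuf_dmabuf behind a buffer once it has been locked. A minimal, hypothetical buf_prepare excerpt showing the usual calling pattern (mydrv_buf_prepare and the descriptor-building step are placeholders, not taken from the source):

    #include <media/videobuf-dma-sg.h>

    static int mydrv_buf_prepare(struct videobuf_queue *q,
                                 struct videobuf_buffer *vb,
                                 enum v4l2_field field)
    {
        struct videobuf_dmabuf *dma;
        int rc;

        /* Pins or allocates the pages and builds + maps the sglist
         * (see __videobuf_iolock() further down in this listing). */
        rc = videobuf_iolock(q, vb, NULL);
        if (rc < 0)
            return rc;

        /* The &mem->dma returned on line 141. */
        dma = videobuf_to_dma(vb);

        /* dma->sglist / dma->sglen now describe the buffer; a real driver
         * would translate them into its own DMA descriptors here. */
        return 0;
    }
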
145 static void videobuf_dma_init(struct videobuf_dmabuf *dma) in videobuf_dma_init() argument
147 memset(dma, 0, sizeof(*dma)); in videobuf_dma_init()
148 dma->magic = MAGIC_DMABUF; in videobuf_dma_init()
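
videobuf_dma_init() only zeroes the descriptor and stamps it with MAGIC_DMABUF. For orientation, here is the shape of struct videobuf_dmabuf reconstructed from the field accesses in this listing; the authoritative declaration lives in include/media/videobuf-dma-sg.h and may differ in minor details:

    struct videobuf_dmabuf {
        u32                magic;        /* MAGIC_DMABUF, verified by MAGIC_CHECK() */

        /* USERPTR buffers: pinned user pages */
        unsigned int       offset;       /* offset of the data inside the first page */
        size_t             size;
        struct page        **pages;

        /* MMAP (kernel) buffers: per-page coherent allocations */
        void               *vaddr;       /* vmap() of vaddr_pages */
        struct page        **vaddr_pages;
        dma_addr_t         *dma_addr;
        struct device      *dev;

        /* OVERLAY buffers: a pre-existing bus address (e.g. the framebuffer) */
        dma_addr_t         bus_addr;

        /* common: the mapped scatter-gather list */
        struct scatterlist *sglist;
        int                sglen;        /* stays 0 while unmapped */
        unsigned long      nr_pages;
        int                direction;    /* DMA_FROM_DEVICE / DMA_TO_DEVICE / DMA_NONE */
    };
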
151 static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma, in videobuf_dma_init_user_locked() argument
158 dma->direction = direction; in videobuf_dma_init_user_locked()
159 switch (dma->direction) { in videobuf_dma_init_user_locked()
172 dma->offset = data & ~PAGE_MASK; in videobuf_dma_init_user_locked()
173 dma->size = size; in videobuf_dma_init_user_locked()
174 dma->nr_pages = last-first+1; in videobuf_dma_init_user_locked()
175 dma->pages = kmalloc_array(dma->nr_pages, sizeof(struct page *), in videobuf_dma_init_user_locked()
177 if (NULL == dma->pages) in videobuf_dma_init_user_locked()
184 data, size, dma->nr_pages); in videobuf_dma_init_user_locked()
186 err = pin_user_pages(data & PAGE_MASK, dma->nr_pages, in videobuf_dma_init_user_locked()
187 flags | FOLL_LONGTERM, dma->pages, NULL); in videobuf_dma_init_user_locked()
189 if (err != dma->nr_pages) { in videobuf_dma_init_user_locked()
190 dma->nr_pages = (err >= 0) ? err : 0; in videobuf_dma_init_user_locked()
192 dma->nr_pages); in videobuf_dma_init_user_locked()
198 static int videobuf_dma_init_user(struct videobuf_dmabuf *dma, int direction, in videobuf_dma_init_user() argument
204 ret = videobuf_dma_init_user_locked(dma, direction, data, size); in videobuf_dma_init_user()
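
Taken together, lines 151-204 implement the USERPTR path: translate the user address range into a page-aligned span, allocate a struct page * array, and pin the pages long-term so they cannot move while the device may write to them. videobuf_dma_init_user() (line 198) is just the variant that takes mmap_lock itself before calling the locked helper. A condensed sketch of that logic, reconstructed from the fragments above rather than copied verbatim:

    static int sketch_dma_init_user_locked(struct videobuf_dmabuf *dma,
                                           int direction, unsigned long data,
                                           unsigned long size)
    {
        unsigned long first, last;
        unsigned int flags = 0;
        int err;

        dma->direction = direction;
        if (dma->direction == DMA_FROM_DEVICE)
            flags |= FOLL_WRITE;           /* the device will write into these pages */

        first = (data & PAGE_MASK) >> PAGE_SHIFT;
        last  = ((data + size - 1) & PAGE_MASK) >> PAGE_SHIFT;
        dma->offset   = data & ~PAGE_MASK; /* line 172: start offset in the first page */
        dma->size     = size;
        dma->nr_pages = last - first + 1;

        dma->pages = kmalloc_array(dma->nr_pages, sizeof(struct page *), GFP_KERNEL);
        if (!dma->pages)
            return -ENOMEM;

        /* FOLL_LONGTERM: the pin outlives the syscall, so the pages must not be
         * migrated out from under the DMA engine (line 186). */
        err = pin_user_pages(data & PAGE_MASK, dma->nr_pages,
                             flags | FOLL_LONGTERM, dma->pages, NULL);
        if (err != dma->nr_pages) {
            dma->nr_pages = (err >= 0) ? err : 0;  /* remember how many to unpin */
            return err < 0 ? err : -EINVAL;
        }
        return 0;
    }
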
210 static int videobuf_dma_init_kernel(struct videobuf_dmabuf *dma, int direction, in videobuf_dma_init_kernel() argument
217 dma->direction = direction; in videobuf_dma_init_kernel()
218 dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages), in videobuf_dma_init_kernel()
220 if (!dma->vaddr_pages) in videobuf_dma_init_kernel()
223 dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL); in videobuf_dma_init_kernel()
224 if (!dma->dma_addr) { in videobuf_dma_init_kernel()
225 kfree(dma->vaddr_pages); in videobuf_dma_init_kernel()
231 addr = dma_alloc_coherent(dma->dev, PAGE_SIZE, in videobuf_dma_init_kernel()
232 &(dma->dma_addr[i]), GFP_KERNEL); in videobuf_dma_init_kernel()
236 dma->vaddr_pages[i] = virt_to_page(addr); in videobuf_dma_init_kernel()
238 dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP, in videobuf_dma_init_kernel()
240 if (NULL == dma->vaddr) { in videobuf_dma_init_kernel()
246 dma->vaddr, nr_pages << PAGE_SHIFT); in videobuf_dma_init_kernel()
248 memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT); in videobuf_dma_init_kernel()
249 dma->nr_pages = nr_pages; in videobuf_dma_init_kernel()
257 addr = page_address(dma->vaddr_pages[i]); in videobuf_dma_init_kernel()
258 dma_free_coherent(dma->dev, PAGE_SIZE, addr, dma->dma_addr[i]); in videobuf_dma_init_kernel()
260 kfree(dma->dma_addr); in videobuf_dma_init_kernel()
261 dma->dma_addr = NULL; in videobuf_dma_init_kernel()
262 kfree(dma->vaddr_pages); in videobuf_dma_init_kernel()
263 dma->vaddr_pages = NULL; in videobuf_dma_init_kernel()
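
The MMAP path (lines 210-263) builds the buffer out of individually allocated coherent pages and then vmap()s them into one contiguous kernel address, which is what __videobuf_to_vaddr() later returns (line 495). A sketch of that flow with the error unwind folded into a single label, as it would sit inside videobuf-dma-sg.c with that file's existing includes; a reconstruction, not the verbatim source:

    static int sketch_dma_init_kernel(struct videobuf_dmabuf *dma, int direction,
                                      unsigned long nr_pages)
    {
        int i;

        dma->direction = direction;
        dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages), GFP_KERNEL);
        if (!dma->vaddr_pages)
            return -ENOMEM;

        dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL);
        if (!dma->dma_addr) {
            kfree(dma->vaddr_pages);
            return -ENOMEM;
        }

        /* One coherent page per element; the device address lands in dma_addr[i]. */
        for (i = 0; i < nr_pages; i++) {
            void *addr = dma_alloc_coherent(dma->dev, PAGE_SIZE,
                                            &dma->dma_addr[i], GFP_KERNEL);
            if (!addr)
                goto out_free_pages;
            dma->vaddr_pages[i] = virt_to_page(addr);
        }

        /* Stitch the individual pages into one kernel-virtual mapping. */
        dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP,
                          PAGE_KERNEL);
        if (!dma->vaddr)
            goto out_free_pages;

        memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT);
        dma->nr_pages = nr_pages;
        return 0;

    out_free_pages:
        while (--i >= 0)
            dma_free_coherent(dma->dev, PAGE_SIZE,
                              page_address(dma->vaddr_pages[i]),
                              dma->dma_addr[i]);
        kfree(dma->dma_addr);
        dma->dma_addr = NULL;
        kfree(dma->vaddr_pages);
        dma->vaddr_pages = NULL;
        return -ENOMEM;
    }
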
269 static int videobuf_dma_init_overlay(struct videobuf_dmabuf *dma, int direction, in videobuf_dma_init_overlay() argument
274 dma->direction = direction; in videobuf_dma_init_overlay()
279 dma->bus_addr = addr; in videobuf_dma_init_overlay()
280 dma->nr_pages = nr_pages; in videobuf_dma_init_overlay()
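
The overlay variant (lines 269-280) does no allocation at all: the caller already has a bus address (typically the graphics framebuffer), so the function only records it together with the page count. That is also why videobuf_dma_map() skips dma_map_sg() for this case behind the !dma->bus_addr test on line 312.
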
285 static int videobuf_dma_map(struct device *dev, struct videobuf_dmabuf *dma) in videobuf_dma_map() argument
287 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); in videobuf_dma_map()
288 BUG_ON(0 == dma->nr_pages); in videobuf_dma_map()
290 if (dma->pages) { in videobuf_dma_map()
291 dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages, in videobuf_dma_map()
292 dma->offset, dma->size); in videobuf_dma_map()
294 if (dma->vaddr) { in videobuf_dma_map()
295 dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr, in videobuf_dma_map()
296 dma->nr_pages); in videobuf_dma_map()
298 if (dma->bus_addr) { in videobuf_dma_map()
299 dma->sglist = vmalloc(sizeof(*dma->sglist)); in videobuf_dma_map()
300 if (NULL != dma->sglist) { in videobuf_dma_map()
301 dma->sglen = 1; in videobuf_dma_map()
302 sg_dma_address(&dma->sglist[0]) = dma->bus_addr in videobuf_dma_map()
304 dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK; in videobuf_dma_map()
305 sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE; in videobuf_dma_map()
308 if (NULL == dma->sglist) { in videobuf_dma_map()
312 if (!dma->bus_addr) { in videobuf_dma_map()
313 dma->sglen = dma_map_sg(dev, dma->sglist, in videobuf_dma_map()
314 dma->nr_pages, dma->direction); in videobuf_dma_map()
315 if (0 == dma->sglen) { in videobuf_dma_map()
318 vfree(dma->sglist); in videobuf_dma_map()
319 dma->sglist = NULL; in videobuf_dma_map()
320 dma->sglen = 0; in videobuf_dma_map()
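
videobuf_dma_map() (lines 285-320) turns whichever representation was initialised above into a scatter-gather list and, unless the buffer already carries a bus address, hands it to the DMA API. The builders it calls, videobuf_pages_to_sg() and videobuf_vmalloc_to_sg(), are helpers in the same file. A condensed sketch, again not verbatim:

    static int sketch_dma_map(struct device *dev, struct videobuf_dmabuf *dma)
    {
        MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
        BUG_ON(0 == dma->nr_pages);

        if (dma->pages)                   /* USERPTR: pinned user pages */
            dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages,
                                               dma->offset, dma->size);
        if (dma->vaddr)                   /* MMAP: vmap'ed coherent pages */
            dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr, dma->nr_pages);
        if (dma->bus_addr) {              /* OVERLAY: one premapped chunk */
            dma->sglist = vmalloc(sizeof(*dma->sglist));
            if (dma->sglist) {
                dma->sglen = 1;
                sg_dma_address(&dma->sglist[0]) = dma->bus_addr & PAGE_MASK;
                dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK;
                sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE;
            }
        }
        if (!dma->sglist)
            return -ENOMEM;

        if (!dma->bus_addr) {             /* bus_addr is already a device address */
            dma->sglen = dma_map_sg(dev, dma->sglist,
                                    dma->nr_pages, dma->direction);
            if (0 == dma->sglen) {
                vfree(dma->sglist);
                dma->sglist = NULL;
                dma->sglen = 0;
                return -ENOMEM;
            }
        }
        return 0;
    }
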
328 int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma) in videobuf_dma_unmap() argument
330 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); in videobuf_dma_unmap()
332 if (!dma->sglen) in videobuf_dma_unmap()
335 dma_unmap_sg(dev, dma->sglist, dma->nr_pages, dma->direction); in videobuf_dma_unmap()
337 vfree(dma->sglist); in videobuf_dma_unmap()
338 dma->sglist = NULL; in videobuf_dma_unmap()
339 dma->sglen = 0; in videobuf_dma_unmap()
345 int videobuf_dma_free(struct videobuf_dmabuf *dma) in videobuf_dma_free() argument
348 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); in videobuf_dma_free()
349 BUG_ON(dma->sglen); in videobuf_dma_free()
351 if (dma->pages) { in videobuf_dma_free()
352 unpin_user_pages_dirty_lock(dma->pages, dma->nr_pages, in videobuf_dma_free()
353 dma->direction == DMA_FROM_DEVICE); in videobuf_dma_free()
354 kfree(dma->pages); in videobuf_dma_free()
355 dma->pages = NULL; in videobuf_dma_free()
358 if (dma->dma_addr) { in videobuf_dma_free()
359 for (i = 0; i < dma->nr_pages; i++) { in videobuf_dma_free()
362 addr = page_address(dma->vaddr_pages[i]); in videobuf_dma_free()
363 dma_free_coherent(dma->dev, PAGE_SIZE, addr, in videobuf_dma_free()
364 dma->dma_addr[i]); in videobuf_dma_free()
366 kfree(dma->dma_addr); in videobuf_dma_free()
367 dma->dma_addr = NULL; in videobuf_dma_free()
368 kfree(dma->vaddr_pages); in videobuf_dma_free()
369 dma->vaddr_pages = NULL; in videobuf_dma_free()
370 vunmap(dma->vaddr); in videobuf_dma_free()
371 dma->vaddr = NULL; in videobuf_dma_free()
374 if (dma->bus_addr) in videobuf_dma_free()
375 dma->bus_addr = 0; in videobuf_dma_free()
376 dma->direction = DMA_NONE; in videobuf_dma_free()
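
videobuf_dma_unmap() and videobuf_dma_free() are the two exported halves of teardown, and their order matters: free() BUG()s if sglen is still non-zero (line 349), so the scatter-gather list must be unmapped first, after which free() unpins the user pages (dirtying them for capture buffers, line 352), releases the coherent pages, or simply clears bus_addr. A hypothetical release helper showing the usual driver-side sequence (mydrv_release_buffer is a placeholder name):

    static void mydrv_release_buffer(struct videobuf_queue *q,
                                     struct videobuf_buffer *vb)
    {
        struct videobuf_dmabuf *dma = videobuf_to_dma(vb);

        videobuf_dma_unmap(q->dev, dma);  /* dma_unmap_sg() + vfree(sglist), sglen = 0 */
        videobuf_dma_free(dma);           /* unpin/free pages, direction = DMA_NONE */
        vb->state = VIDEOBUF_NEEDS_INIT;
    }
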
479 videobuf_dma_init(&mem->dma); in __videobuf_alloc_vb()
495 return mem->dma.vaddr; in __videobuf_to_vaddr()
511 if (!mem->dma.dev) in __videobuf_iolock()
512 mem->dma.dev = q->dev; in __videobuf_iolock()
514 WARN_ON(mem->dma.dev != q->dev); in __videobuf_iolock()
522 err = videobuf_dma_init_kernel(&mem->dma, in __videobuf_iolock()
529 err = videobuf_dma_init_user(&mem->dma, in __videobuf_iolock()
540 err = videobuf_dma_init_user_locked(&mem->dma, in __videobuf_iolock()
558 err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE, in __videobuf_iolock()
566 err = videobuf_dma_map(q->dev, &mem->dma); in __videobuf_iolock()
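
__videobuf_iolock() is the queue operation that ties the pieces together: it records the device for later coherent allocations and mapping, picks the init path from the buffer's V4L2 memory type, and finishes with videobuf_dma_map(). A condensed sketch inferred from the call sites above (lines 511-566), with error handling and the overlay sanity checks trimmed:

    static int sketch_iolock(struct videobuf_queue *q, struct videobuf_buffer *vb,
                             struct v4l2_framebuffer *fbuf)
    {
        struct videobuf_dma_sg_memory *mem = vb->priv;  /* wraps the videobuf_dmabuf */
        unsigned long pages = PAGE_ALIGN(vb->size) >> PAGE_SHIFT;
        int err = 0;

        if (!mem->dma.dev)
            mem->dma.dev = q->dev;        /* lines 511-514 */

        switch (vb->memory) {
        case V4L2_MEMORY_MMAP:
        case V4L2_MEMORY_USERPTR:
            if (vb->baddr == 0)
                /* no user address: kernel bounce buffer */
                err = videobuf_dma_init_kernel(&mem->dma, DMA_FROM_DEVICE, pages);
            else if (vb->memory == V4L2_MEMORY_USERPTR)
                /* DMA straight into application memory */
                err = videobuf_dma_init_user(&mem->dma, DMA_FROM_DEVICE,
                                             vb->baddr, vb->bsize);
            else
                /* mmap'ed buffer queued from qbuf: mmap_lock already held */
                err = videobuf_dma_init_user_locked(&mem->dma, DMA_FROM_DEVICE,
                                                    vb->baddr, vb->bsize);
            break;
        case V4L2_MEMORY_OVERLAY:
            /* framebuffer base plus buffer offset is already a bus address */
            err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE,
                                            (dma_addr_t)(unsigned long)fbuf->base + vb->boff,
                                            pages);
            break;
        default:
            BUG();
        }
        if (err)
            return err;

        return videobuf_dma_map(q->dev, &mem->dma);     /* line 566 */
    }
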
577 BUG_ON(!mem || !mem->dma.sglen); in __videobuf_sync()
580 MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF); in __videobuf_sync()
582 dma_sync_sg_for_cpu(q->dev, mem->dma.sglist, in __videobuf_sync()
583 mem->dma.nr_pages, mem->dma.direction); in __videobuf_sync()
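
Finally, __videobuf_sync() (lines 577-583) is what gives the CPU back ownership of a finished buffer: it syncs the whole scatter-gather list with the same nents that were originally passed to dma_map_sg(). A sketch of the body:

    static int sketch_sync(struct videobuf_queue *q, struct videobuf_buffer *buf)
    {
        struct videobuf_dma_sg_memory *mem = buf->priv;

        BUG_ON(!mem || !mem->dma.sglen);
        MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF);

        /* Make the device's writes visible to the CPU before the kernel or
         * userspace touches the frame data. */
        dma_sync_sg_for_cpu(q->dev, mem->dma.sglist,
                            mem->dma.nr_pages, mem->dma.direction);
        return 0;
    }
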