
Searched refs:cpu_addr (Results 1 – 7 of 7) sorted by relevance

/kernel/dma/
ops_helpers.c
    8   static struct page *dma_common_vaddr_to_page(void *cpu_addr)        in dma_common_vaddr_to_page() argument
   10       if (is_vmalloc_addr(cpu_addr))                                   in dma_common_vaddr_to_page()
   11           return vmalloc_to_page(cpu_addr);                            in dma_common_vaddr_to_page()
   12       return virt_to_page(cpu_addr);                                   in dma_common_vaddr_to_page()
   19           void *cpu_addr, dma_addr_t dma_addr, size_t size,            in dma_common_get_sgtable() argument
   22       struct page *page = dma_common_vaddr_to_page(cpu_addr);          in dma_common_get_sgtable()
   36           void *cpu_addr, dma_addr_t dma_addr, size_t size,            in dma_common_mmap() argument
   43       struct page *page = dma_common_vaddr_to_page(cpu_addr);          in dma_common_mmap()
   48       if (dma_mmap_from_dev_coherent(dev, vma, cpu_addr, size, &ret))  in dma_common_mmap()

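For context on where these helpers sit: dma_common_mmap() and dma_common_get_sgtable() are generic backends that dma_map_ops implementations can use for their .mmap and .get_sgtable callbacks. Below is a minimal sketch, not taken from the hits above, of the driver-side path that eventually lands here; struct my_dev, its fields, and my_dev_mmap() are hypothetical names used only for illustration.

#include <linux/dma-mapping.h>
#include <linux/fs.h>
#include <linux/mm.h>

/* Hypothetical per-device state; cpu_addr/dma_handle would come from an
 * earlier dma_alloc_coherent()/dma_alloc_attrs() call. */
struct my_dev {
        struct device *dev;
        void *cpu_addr;
        dma_addr_t dma_handle;
        size_t buf_size;
};

/* .mmap handler of a hypothetical char device: map the coherent buffer into
 * userspace.  dma_mmap_coherent() expands to dma_mmap_attrs(), which either
 * calls dma_direct_mmap() or dispatches to ops->mmap(); generic ops can back
 * that callback with dma_common_mmap() shown in the hits above. */
static int my_dev_mmap(struct file *file, struct vm_area_struct *vma)
{
        struct my_dev *md = file->private_data;

        return dma_mmap_coherent(md->dev, vma, md->cpu_addr,
                                 md->dma_handle, md->buf_size);
}
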
remap.c
    9   struct page **dma_common_find_pages(void *cpu_addr)                      in dma_common_find_pages() argument
   11       struct vm_struct *area = find_vm_area(cpu_addr);                      in dma_common_find_pages()
   60   void dma_common_free_remap(void *cpu_addr, size_t size)                   in dma_common_free_remap() argument
   62       struct vm_struct *area = find_vm_area(cpu_addr);                      in dma_common_free_remap()
   65           WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr);  in dma_common_free_remap()
   69       vunmap(cpu_addr);                                                     in dma_common_free_remap()

mapping.c
  396           void *cpu_addr, dma_addr_t dma_addr, size_t size,                in dma_get_sgtable_attrs() argument
  402           return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr,      in dma_get_sgtable_attrs()
  406       return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs);  in dma_get_sgtable_attrs()
  461           void *cpu_addr, dma_addr_t dma_addr, size_t size,                in dma_mmap_attrs() argument
  467           return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size,       in dma_mmap_attrs()
  471       return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs);         in dma_mmap_attrs()
  500       void *cpu_addr;                                                      in dma_alloc_attrs() local
  504       if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr))   in dma_alloc_attrs()
  505           return cpu_addr;                                                 in dma_alloc_attrs()
  511       cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs);     in dma_alloc_attrs()
  [all …]

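dma_alloc_attrs() and dma_free_attrs() are the driver-facing entry points that produce and release a cpu_addr/dma_handle pair; the hits at lines 500–511 show the allocation trying the per-device coherent area first and then falling back to dma_direct_alloc() or ops->alloc(). A minimal usage sketch follows; my_driver_alloc_ring() and my_driver_free_ring() are hypothetical wrappers, and an attrs value of 0 simply means no DMA_ATTR_* flags.

#include <linux/dma-mapping.h>

/* Allocate a coherent buffer: the return value is the kernel virtual address
 * (cpu_addr) and *dma_handle receives the device-visible address. */
static int my_driver_alloc_ring(struct device *dev, size_t size,
                                void **cpu_addr, dma_addr_t *dma_handle)
{
        *cpu_addr = dma_alloc_attrs(dev, size, dma_handle, GFP_KERNEL, 0);
        if (!*cpu_addr)
                return -ENOMEM;
        return 0;
}

/* Release it with the same size/cpu_addr/dma_handle triple. */
static void my_driver_free_ring(struct device *dev, size_t size,
                                void *cpu_addr, dma_addr_t dma_handle)
{
        dma_free_attrs(dev, size, cpu_addr, dma_handle, 0);
}
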
direct.c
  327           void *cpu_addr, dma_addr_t dma_addr, unsigned long attrs)  in dma_direct_free() argument
  334           dma_free_contiguous(dev, cpu_addr, size);                  in dma_direct_free()
  343           arch_dma_free(dev, size, cpu_addr, dma_addr, attrs);       in dma_direct_free()
  349       if (!dma_release_from_global_coherent(page_order, cpu_addr))   in dma_direct_free()
  356           dma_free_from_pool(dev, cpu_addr, PAGE_ALIGN(size)))       in dma_direct_free()
  359       if (is_vmalloc_addr(cpu_addr)) {                               in dma_direct_free()
  360           vunmap(cpu_addr);                                          in dma_direct_free()
  363           arch_dma_clear_uncached(cpu_addr, size);                   in dma_direct_free()
  364       if (dma_set_encrypted(dev, cpu_addr, size))                    in dma_direct_free()
  542           void *cpu_addr, dma_addr_t dma_addr, size_t size,          in dma_direct_get_sgtable() argument
  [all …]

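dma_direct_get_sgtable() at line 542 is the direct-mapping backend of dma_get_sgtable_attrs(); drivers normally reach it through the dma_get_sgtable() wrapper, for example when exporting a coherent buffer to another device. A hedged sketch follows; my_export_sgtable() is a hypothetical helper name.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Describe an existing coherent buffer (cpu_addr/dma_addr from a previous
 * dma_alloc_coherent()) as an sg_table, e.g. for a dma-buf style exporter.
 * dma_get_sgtable() expands to dma_get_sgtable_attrs(), which ends up in
 * dma_direct_get_sgtable() or ops->get_sgtable(). */
static int my_export_sgtable(struct device *dev, struct sg_table *sgt,
                             void *cpu_addr, dma_addr_t dma_addr, size_t size)
{
        int ret;

        ret = dma_get_sgtable(dev, sgt, cpu_addr, dma_addr, size);
        if (ret)
                return ret;

        /* ... hand sgt to the importer; call sg_free_table(sgt) when done ... */
        return 0;
}
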
pool.c
  241           struct gen_pool *pool, void **cpu_addr,               in __dma_alloc_from_pool() argument
  260       *cpu_addr = (void *)addr;                                 in __dma_alloc_from_pool()
  261       memset(*cpu_addr, 0, size);                               in __dma_alloc_from_pool()
  266           void **cpu_addr, gfp_t gfp,                           in dma_alloc_from_pool() argument
  273       page = __dma_alloc_from_pool(dev, size, pool, cpu_addr,   in dma_alloc_from_pool()

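The pool.c hits are the atomic-pool path: __dma_alloc_from_pool() carves memory out of pre-populated pools and zeroes it (the memset at line 261), so that dma_direct_alloc() can satisfy requests that are not allowed to sleep. A minimal sketch of an allocation that may be served from such a pool; my_alloc_atomic() is a hypothetical wrapper.

#include <linux/dma-mapping.h>

/* With GFP_ATOMIC the DMA core cannot sleep to remap or zero pages, so on
 * configurations that need that work the request may be filled from one of
 * the atomic pools managed by dma_alloc_from_pool() above. */
static void *my_alloc_atomic(struct device *dev, size_t size,
                             dma_addr_t *dma_handle)
{
        return dma_alloc_coherent(dev, size, dma_handle, GFP_ATOMIC);
}
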
direct.h
   14           void *cpu_addr, dma_addr_t dma_addr, size_t size,
   18           void *cpu_addr, dma_addr_t dma_addr, size_t size,

dummy.c
    8           void *cpu_addr, dma_addr_t dma_addr, size_t size,   in dma_dummy_mmap() argument