Home
last modified time | relevance | path

Searched refs:cpu_addr (Results 1 – 8 of 8) sorted by relevance

/kernel/dma/
ops_helpers.c:8 static struct page *dma_common_vaddr_to_page(void *cpu_addr) in dma_common_vaddr_to_page() argument
10 if (is_vmalloc_addr(cpu_addr)) in dma_common_vaddr_to_page()
11 return vmalloc_to_page(cpu_addr); in dma_common_vaddr_to_page()
12 return virt_to_page(cpu_addr); in dma_common_vaddr_to_page()
19 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_common_get_sgtable() argument
22 struct page *page = dma_common_vaddr_to_page(cpu_addr); in dma_common_get_sgtable()
35 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_common_mmap() argument
42 struct page *page = dma_common_vaddr_to_page(cpu_addr); in dma_common_mmap()
47 if (dma_mmap_from_dev_coherent(dev, vma, cpu_addr, size, &ret)) in dma_common_mmap()
remap.c:9 struct page **dma_common_find_pages(void *cpu_addr) in dma_common_find_pages() argument
11 struct vm_struct *area = find_vm_area(cpu_addr); in dma_common_find_pages()
60 void dma_common_free_remap(void *cpu_addr, size_t size) in dma_common_free_remap() argument
62 struct vm_struct *area = find_vm_area(cpu_addr); in dma_common_free_remap()
65 WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr); in dma_common_free_remap()
69 unmap_kernel_range((unsigned long)cpu_addr, PAGE_ALIGN(size)); in dma_common_free_remap()
70 vunmap(cpu_addr); in dma_common_free_remap()
mapping.c:322 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_get_sgtable_attrs() argument
328 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
332 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
389 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_mmap_attrs() argument
395 return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
399 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
428 void *cpu_addr; in dma_alloc_attrs() local
432 if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr)) in dma_alloc_attrs()
433 return cpu_addr; in dma_alloc_attrs()
439 cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
[all …]
direct.c:247 void *cpu_addr, dma_addr_t dma_addr, unsigned long attrs) in dma_direct_free() argument
252 dma_free_contiguous(dev, cpu_addr, size); in dma_direct_free()
259 arch_dma_free(dev, size, cpu_addr, dma_addr, attrs); in dma_direct_free()
265 dma_free_from_pool(dev, cpu_addr, PAGE_ALIGN(size))) in dma_direct_free()
269 set_memory_encrypted((unsigned long)cpu_addr, PFN_UP(size)); in dma_direct_free()
271 if (IS_ENABLED(CONFIG_DMA_REMAP) && is_vmalloc_addr(cpu_addr)) in dma_direct_free()
272 vunmap(cpu_addr); in dma_direct_free()
274 arch_dma_clear_uncached(cpu_addr, size); in dma_direct_free()
432 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_direct_get_sgtable() argument
451 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_direct_mmap() argument
[all …]
pool.c:244 struct gen_pool *pool, void **cpu_addr, in __dma_alloc_from_pool() argument
263 *cpu_addr = (void *)addr; in __dma_alloc_from_pool()
264 memset(*cpu_addr, 0, size); in __dma_alloc_from_pool()
269 void **cpu_addr, gfp_t gfp, in dma_alloc_from_pool() argument
276 page = __dma_alloc_from_pool(dev, size, pool, cpu_addr, in dma_alloc_from_pool()
virt.c:23 void *cpu_addr, dma_addr_t dma_addr, in dma_virt_free() argument
26 free_pages((unsigned long)cpu_addr, get_order(size)); in dma_virt_free()
direct.h:13 void *cpu_addr, dma_addr_t dma_addr, size_t size,
17 void *cpu_addr, dma_addr_t dma_addr, size_t size,
dummy.c:8 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_dummy_mmap() argument