
Searched refs:cpu_addr (Results 1 – 7 of 7) sorted by relevance

/kernel/dma/
ops_helpers.c
8 static struct page *dma_common_vaddr_to_page(void *cpu_addr) in dma_common_vaddr_to_page() argument
10 if (is_vmalloc_addr(cpu_addr)) in dma_common_vaddr_to_page()
11 return vmalloc_to_page(cpu_addr); in dma_common_vaddr_to_page()
12 return virt_to_page(cpu_addr); in dma_common_vaddr_to_page()
19 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_common_get_sgtable() argument
22 struct page *page = dma_common_vaddr_to_page(cpu_addr); in dma_common_get_sgtable()
35 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_common_mmap() argument
42 struct page *page = dma_common_vaddr_to_page(cpu_addr); in dma_common_mmap()
47 if (dma_mmap_from_dev_coherent(dev, vma, cpu_addr, size, &ret)) in dma_common_mmap()
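The bodies of dma_common_get_sgtable() and dma_common_mmap() are elided in the matches above. For orientation, a helper of this kind typically wraps the page resolved from cpu_addr in a single-entry scatterlist; the sketch below assumes the usual sg_alloc_table()/sg_set_page() pattern and is not a verbatim copy of ops_helpers.c.

#include <linux/scatterlist.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/* Hedged sketch: pack one coherent buffer, already resolved to a struct page
 * (as dma_common_vaddr_to_page() does above), into a single-entry sg_table
 * the way a get_sgtable callback is expected to. */
static int example_page_to_sgtable(struct sg_table *sgt, struct page *page,
				   size_t size)
{
	int ret = sg_alloc_table(sgt, 1, GFP_KERNEL);

	if (!ret)
		sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0);
	return ret;
}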
remap.c
9 struct page **dma_common_find_pages(void *cpu_addr) in dma_common_find_pages() argument
11 struct vm_struct *area = find_vm_area(cpu_addr); in dma_common_find_pages()
60 void dma_common_free_remap(void *cpu_addr, size_t size) in dma_common_free_remap() argument
62 struct vm_struct *area = find_vm_area(cpu_addr); in dma_common_free_remap()
65 WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr); in dma_common_free_remap()
69 vunmap(cpu_addr); in dma_common_free_remap()
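Only the lookup and free paths of remap.c are matched here. Their allocation-side counterpart in the same file (dma_common_pages_remap(), not hit by this search) vmap()s a page array so that find_vm_area() can later recover it; the following is a minimal sketch of that pattern under the assumption that the VM_DMA_COHERENT area flag is used, not the exact kernel code.

#include <linux/vmalloc.h>
#include <linux/mm.h>

/* Hedged sketch of the vmap()-based remapping that dma_common_find_pages()
 * and dma_common_free_remap() above undo: map a page array into a contiguous
 * kernel virtual range and record the pages on the vm_struct. */
static void *example_dma_remap_pages(struct page **pages, size_t size,
				     pgprot_t prot)
{
	void *vaddr = vmap(pages, PAGE_ALIGN(size) >> PAGE_SHIFT,
			   VM_DMA_COHERENT, prot);

	if (vaddr)
		find_vm_area(vaddr)->pages = pages;
	return vaddr;
}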
mapping.c
389 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_get_sgtable_attrs() argument
395 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
399 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
456 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_mmap_attrs() argument
462 return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
466 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
495 void *cpu_addr; in dma_alloc_attrs() local
499 if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr)) in dma_alloc_attrs()
500 return cpu_addr; in dma_alloc_attrs()
506 cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
[all …]
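The cpu_addr these entry points take is the kernel virtual address previously returned by dma_alloc_attrs() for the same buffer. A hedged driver-side usage sketch (dev and vma are assumed to come from the caller; this is not code from mapping.c):

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/errno.h>

/* Hedged usage sketch: allocate a coherent buffer, then hand the same
 * (cpu_addr, dma_handle, size) triple back to dma_mmap_attrs(), which
 * dispatches to dma_direct_mmap() or ops->mmap() as shown above. */
static int example_alloc_and_mmap(struct device *dev,
				  struct vm_area_struct *vma, size_t size)
{
	dma_addr_t dma_handle;
	void *cpu_addr;
	int ret;

	cpu_addr = dma_alloc_attrs(dev, size, &dma_handle, GFP_KERNEL, 0);
	if (!cpu_addr)
		return -ENOMEM;

	ret = dma_mmap_attrs(dev, vma, cpu_addr, dma_handle, size, 0);
	if (ret)
		dma_free_attrs(dev, size, cpu_addr, dma_handle, 0);
	return ret;
}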
direct.c
291 void *cpu_addr, dma_addr_t dma_addr, unsigned long attrs) in dma_direct_free() argument
298 dma_free_contiguous(dev, cpu_addr, size); in dma_direct_free()
307 arch_dma_free(dev, size, cpu_addr, dma_addr, attrs); in dma_direct_free()
313 if (!dma_release_from_global_coherent(page_order, cpu_addr)) in dma_direct_free()
320 dma_free_from_pool(dev, cpu_addr, PAGE_ALIGN(size))) in dma_direct_free()
323 if (IS_ENABLED(CONFIG_DMA_REMAP) && is_vmalloc_addr(cpu_addr)) { in dma_direct_free()
324 vunmap(cpu_addr); in dma_direct_free()
327 arch_dma_clear_uncached(cpu_addr, size); in dma_direct_free()
328 if (dma_set_encrypted(dev, cpu_addr, size)) in dma_direct_free()
476 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_direct_get_sgtable() argument
[all …]
pool.c
241 struct gen_pool *pool, void **cpu_addr, in __dma_alloc_from_pool() argument
260 *cpu_addr = (void *)addr; in __dma_alloc_from_pool()
261 memset(*cpu_addr, 0, size); in __dma_alloc_from_pool()
266 void **cpu_addr, gfp_t gfp, in dma_alloc_from_pool() argument
273 page = __dma_alloc_from_pool(dev, size, pool, cpu_addr, in dma_alloc_from_pool()
direct.h
13 void *cpu_addr, dma_addr_t dma_addr, size_t size,
17 void *cpu_addr, dma_addr_t dma_addr, size_t size,
dummy.c
8 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_dummy_mmap() argument
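dummy.c provides the fallback dma_map_ops whose callbacks refuse every operation, and the ops->mmap()/ops->get_sgtable() dispatch in mapping.c lands on methods with exactly this shape. A hedged sketch of such a minimal .mmap callback follows; the header name assumes a recent tree where struct dma_map_ops lives in <linux/dma-map-ops.h>.

#include <linux/dma-map-ops.h>
#include <linux/errno.h>

/* Hedged sketch in the style of dma_dummy_mmap(): a .mmap callback that
 * rejects userspace mappings.  dma_mmap_attrs() reaches it via
 * ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs). */
static int example_nop_mmap(struct device *dev, struct vm_area_struct *vma,
			    void *cpu_addr, dma_addr_t dma_addr, size_t size,
			    unsigned long attrs)
{
	return -ENXIO;
}

static const struct dma_map_ops example_nop_ops = {
	.mmap = example_nop_mmap,
};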