
Lines matching references to size in kernel/dma/mapping.c (the Linux DMA mapping core); the leading number on each line below is its line number in that source file.

34 	size_t		size;  member of struct dma_devres
44 dma_free_attrs(dev, this->size, this->vaddr, this->dma_handle, in dmam_release()
53 WARN_ON(this->size != match->size || in dmam_match()
69 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr, in dmam_free_coherent() argument
72 struct dma_devres match_data = { size, vaddr, dma_handle }; in dmam_free_coherent()
75 dma_free_coherent(dev, size, vaddr, dma_handle); in dmam_free_coherent()
93 void *dmam_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dmam_alloc_attrs() argument
103 vaddr = dma_alloc_attrs(dev, size, dma_handle, gfp, attrs); in dmam_alloc_attrs()
111 dr->size = size; in dmam_alloc_attrs()
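
The lines above come from the managed ("devres") coherent allocator: dmam_alloc_attrs() records the size/vaddr/dma_handle triple in a struct dma_devres so the allocation is released automatically at driver detach, and dmam_free_coherent() uses the same triple as match data for an early release. A minimal usage sketch, with a hypothetical probe-time helper and buffer size:

	#include <linux/device.h>
	#include <linux/dma-mapping.h>
	#include <linux/sizes.h>

	/* Hypothetical probe-time setup: the allocation made here is freed
	 * automatically by devres when the driver is unbound, so no explicit
	 * dmam_free_coherent() is needed on the success path. */
	static int my_probe_buf(struct device *dev, void **vaddr, dma_addr_t *dma)
	{
		*vaddr = dmam_alloc_attrs(dev, SZ_4K, dma, GFP_KERNEL, 0);
		if (!*vaddr)
			return -ENOMEM;
		return 0;
	}
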
156 size_t offset, size_t size, enum dma_data_direction dir, in dma_map_page_attrs() argument
168 arch_dma_map_page_direct(dev, page_to_phys(page) + offset + size)) in dma_map_page_attrs()
169 addr = dma_direct_map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
171 addr = iommu_dma_map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
173 addr = ops->map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
174 kmsan_handle_dma(page, offset, size, dir); in dma_map_page_attrs()
175 trace_dma_map_page(dev, page_to_phys(page) + offset, addr, size, dir, in dma_map_page_attrs()
177 debug_dma_map_page(dev, page, offset, size, dir, addr, attrs); in dma_map_page_attrs()
183 void dma_unmap_page_attrs(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_page_attrs() argument
190 arch_dma_unmap_page_direct(dev, addr + size)) in dma_unmap_page_attrs()
191 dma_direct_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
193 iommu_dma_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
195 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
196 trace_dma_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
197 debug_dma_unmap_page(dev, addr, size, dir); in dma_unmap_page_attrs()
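
dma_map_page_attrs()/dma_unmap_page_attrs() are the core of the streaming mapping path; most drivers reach them through the dma_map_page()/dma_unmap_page() wrappers (attrs = 0). A sketch of a single transmit, with a made-up hardware hand-off in the middle:

	#include <linux/dma-mapping.h>

	/* Map one page for device reads, check for failure, unmap after
	 * the transfer completes. */
	static int my_tx_one_page(struct device *dev, struct page *page,
				  size_t offset, size_t len)
	{
		dma_addr_t addr;

		addr = dma_map_page(dev, page, offset, len, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, addr))
			return -ENOMEM;

		/* ... point the hardware at 'addr' and wait for completion ... */

		dma_unmap_page(dev, addr, len, DMA_TO_DEVICE);
		return 0;
	}

The dma_mapping_error() check is mandatory: a failed mapping returns DMA_MAPPING_ERROR, not 0.
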
322 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_map_resource() argument
333 addr = dma_direct_map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
335 addr = iommu_dma_map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
337 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
339 trace_dma_map_resource(dev, phys_addr, addr, size, dir, attrs); in dma_map_resource()
340 debug_dma_map_resource(dev, phys_addr, size, dir, addr, attrs); in dma_map_resource()
345 void dma_unmap_resource(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_resource() argument
354 iommu_dma_unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
356 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
357 trace_dma_unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
358 debug_dma_unmap_resource(dev, addr, size, dir); in dma_unmap_resource()
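
dma_map_resource() differs from the page-based mappings above in that it takes a raw phys_addr_t, typically an MMIO region such as another device's FIFO for device-to-device DMA, and involves no CPU cache maintenance. A hypothetical sketch:

	#include <linux/dma-mapping.h>

	/* Map a (made-up) MMIO physical address so this device's DMA
	 * engine can target it directly. */
	static dma_addr_t my_map_fifo(struct device *dev, phys_addr_t fifo_phys)
	{
		dma_addr_t addr;

		addr = dma_map_resource(dev, fifo_phys, PAGE_SIZE,
					DMA_BIDIRECTIONAL, 0);
		if (dma_mapping_error(dev, addr))
			return DMA_MAPPING_ERROR;
		return addr;
	}

	/* later: dma_unmap_resource(dev, addr, PAGE_SIZE, DMA_BIDIRECTIONAL, 0); */
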
363 void __dma_sync_single_for_cpu(struct device *dev, dma_addr_t addr, size_t size, in __dma_sync_single_for_cpu() argument
370 dma_direct_sync_single_for_cpu(dev, addr, size, dir); in __dma_sync_single_for_cpu()
372 iommu_dma_sync_single_for_cpu(dev, addr, size, dir); in __dma_sync_single_for_cpu()
374 ops->sync_single_for_cpu(dev, addr, size, dir); in __dma_sync_single_for_cpu()
375 trace_dma_sync_single_for_cpu(dev, addr, size, dir); in __dma_sync_single_for_cpu()
376 debug_dma_sync_single_for_cpu(dev, addr, size, dir); in __dma_sync_single_for_cpu()
381 size_t size, enum dma_data_direction dir) in __dma_sync_single_for_device() argument
387 dma_direct_sync_single_for_device(dev, addr, size, dir); in __dma_sync_single_for_device()
389 iommu_dma_sync_single_for_device(dev, addr, size, dir); in __dma_sync_single_for_device()
391 ops->sync_single_for_device(dev, addr, size, dir); in __dma_sync_single_for_device()
392 trace_dma_sync_single_for_device(dev, addr, size, dir); in __dma_sync_single_for_device()
393 debug_dma_sync_single_for_device(dev, addr, size, dir); in __dma_sync_single_for_device()
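
These two functions are the internals behind the public dma_sync_single_for_cpu()/dma_sync_single_for_device() wrappers, which pass ownership of a long-lived streaming mapping back and forth between CPU and device. A sketch of a receive-completion path (function and parameter names are made up):

	#include <linux/dma-mapping.h>

	static void my_rx_complete(struct device *dev, dma_addr_t buf_dma,
				   void *buf, size_t len)
	{
		/* Give the buffer back to the CPU before reading it. */
		dma_sync_single_for_cpu(dev, buf_dma, len, DMA_FROM_DEVICE);

		/* ... inspect buf ... */

		/* Hand it back to the device for the next receive. */
		dma_sync_single_for_device(dev, buf_dma, len, DMA_FROM_DEVICE);
	}
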
501 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_get_sgtable_attrs() argument
508 size, attrs); in dma_get_sgtable_attrs()
511 size, attrs); in dma_get_sgtable_attrs()
514 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
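
dma_get_sgtable_attrs(), usually called through the dma_get_sgtable() wrapper (attrs = 0), fills in a scatter-gather table describing the pages behind a coherent allocation, for example when exporting it as a dma-buf. A sketch under that assumption:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int my_export_sgt(struct device *dev, struct sg_table *sgt,
				 void *cpu_addr, dma_addr_t dma_addr, size_t size)
	{
		int ret;

		ret = dma_get_sgtable(dev, sgt, cpu_addr, dma_addr, size);
		if (ret)
			return ret; /* negative errno on failure */
		/* sgt now describes the pages backing the allocation */
		return 0;
	}
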
568 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_mmap_attrs() argument
574 return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
577 return iommu_dma_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
581 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
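
dma_mmap_attrs() maps a coherent allocation into a userspace VMA; drivers commonly call it via dma_mmap_coherent() (attrs = 0) from their .mmap handler. A sketch with a hypothetical per-device structure:

	#include <linux/dma-mapping.h>
	#include <linux/fs.h>
	#include <linux/mm.h>

	/* Hypothetical per-device state holding a coherent allocation. */
	struct my_dev {
		struct device *dev;
		void *cpu_addr;
		dma_addr_t dma_addr;
		size_t size;
	};

	static int my_mmap(struct file *file, struct vm_area_struct *vma)
	{
		struct my_dev *md = file->private_data;

		return dma_mmap_coherent(md->dev, vma, md->cpu_addr,
					 md->dma_addr, md->size);
	}
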
610 void *dma_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_attrs() argument
626 if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr)) { in dma_alloc_attrs()
627 trace_dma_alloc(dev, cpu_addr, *dma_handle, size, in dma_alloc_attrs()
636 cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
638 cpu_addr = iommu_dma_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
640 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
642 trace_dma_alloc(dev, NULL, 0, size, DMA_BIDIRECTIONAL, flag, in dma_alloc_attrs()
647 trace_dma_alloc(dev, cpu_addr, *dma_handle, size, DMA_BIDIRECTIONAL, in dma_alloc_attrs()
649 debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr, attrs); in dma_alloc_attrs()
654 void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, in dma_free_attrs() argument
659 if (dma_release_from_dev_coherent(dev, get_order(size), cpu_addr)) in dma_free_attrs()
670 trace_dma_free(dev, cpu_addr, dma_handle, size, DMA_BIDIRECTIONAL, in dma_free_attrs()
675 debug_dma_free_coherent(dev, size, cpu_addr, dma_handle); in dma_free_attrs()
677 dma_direct_free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
679 iommu_dma_free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
681 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
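
dma_alloc_attrs()/dma_free_attrs() are what the familiar dma_alloc_coherent()/dma_free_coherent() wrappers expand to with attrs = 0. A sketch of the classic pairing:

	#include <linux/dma-mapping.h>
	#include <linux/sizes.h>

	static void my_coherent_demo(struct device *dev)
	{
		dma_addr_t dma;
		void *cpu;

		cpu = dma_alloc_coherent(dev, SZ_4K, &dma, GFP_KERNEL);
		if (!cpu)
			return;

		/* cpu is the kernel-visible address, dma the device-visible one */

		dma_free_coherent(dev, SZ_4K, cpu, dma);
	}
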
685 static struct page *__dma_alloc_pages(struct device *dev, size_t size, in __dma_alloc_pages() argument
697 size = PAGE_ALIGN(size); in __dma_alloc_pages()
699 return dma_direct_alloc_pages(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
701 return dma_common_alloc_pages(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
704 return ops->alloc_pages_op(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
707 struct page *dma_alloc_pages(struct device *dev, size_t size, in dma_alloc_pages() argument
710 struct page *page = __dma_alloc_pages(dev, size, dma_handle, dir, gfp); in dma_alloc_pages()
714 size, dir, gfp, 0); in dma_alloc_pages()
715 debug_dma_alloc_pages(dev, page, size, dir, *dma_handle, 0); in dma_alloc_pages()
717 trace_dma_alloc_pages(dev, NULL, 0, size, dir, gfp, 0); in dma_alloc_pages()
723 static void __dma_free_pages(struct device *dev, size_t size, struct page *page, in __dma_free_pages() argument
728 size = PAGE_ALIGN(size); in __dma_free_pages()
730 dma_direct_free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
732 dma_common_free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
734 ops->free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
737 void dma_free_pages(struct device *dev, size_t size, struct page *page, in dma_free_pages() argument
740 trace_dma_free_pages(dev, page_to_virt(page), dma_handle, size, dir, 0); in dma_free_pages()
741 debug_dma_free_pages(dev, page, size, dir, dma_handle); in dma_free_pages()
742 __dma_free_pages(dev, size, page, dma_handle, dir); in dma_free_pages()
747 size_t size, struct page *page) in dma_mmap_pages() argument
749 unsigned long count = PAGE_ALIGN(size) >> PAGE_SHIFT; in dma_mmap_pages()
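
dma_alloc_pages()/dma_free_pages() return struct page-backed, possibly non-coherent memory in which the caller performs explicit cache maintenance, and dma_mmap_pages() maps such an allocation into userspace. A sketch under those assumptions:

	#include <linux/dma-mapping.h>
	#include <linux/sizes.h>

	static int my_pages_demo(struct device *dev)
	{
		dma_addr_t dma;
		struct page *page;

		page = dma_alloc_pages(dev, SZ_64K, &dma, DMA_BIDIRECTIONAL,
				       GFP_KERNEL);
		if (!page)
			return -ENOMEM;

		/* Non-coherent: bracket CPU accesses with
		 * dma_sync_single_for_cpu()/dma_sync_single_for_device(). */

		dma_free_pages(dev, SZ_64K, page, dma, DMA_BIDIRECTIONAL);
		return 0;
	}

Inside an .mmap handler, dma_mmap_pages(dev, vma, SZ_64K, page) would expose the same pages to userspace.
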
759 static struct sg_table *alloc_single_sgt(struct device *dev, size_t size, in alloc_single_sgt() argument
770 page = __dma_alloc_pages(dev, size, &sgt->sgl->dma_address, dir, gfp); in alloc_single_sgt()
773 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); in alloc_single_sgt()
783 struct sg_table *dma_alloc_noncontiguous(struct device *dev, size_t size, in dma_alloc_noncontiguous() argument
794 sgt = iommu_dma_alloc_noncontiguous(dev, size, dir, gfp, attrs); in dma_alloc_noncontiguous()
796 sgt = alloc_single_sgt(dev, size, dir, gfp); in dma_alloc_noncontiguous()
800 trace_dma_alloc_sgt(dev, sgt, size, dir, gfp, attrs); in dma_alloc_noncontiguous()
803 trace_dma_alloc_sgt_err(dev, NULL, 0, size, dir, gfp, attrs); in dma_alloc_noncontiguous()
809 static void free_single_sgt(struct device *dev, size_t size, in free_single_sgt() argument
812 __dma_free_pages(dev, size, sg_page(sgt->sgl), sgt->sgl->dma_address, in free_single_sgt()
818 void dma_free_noncontiguous(struct device *dev, size_t size, in dma_free_noncontiguous() argument
821 trace_dma_free_sgt(dev, sgt, size, dir); in dma_free_noncontiguous()
825 iommu_dma_free_noncontiguous(dev, size, sgt, dir); in dma_free_noncontiguous()
827 free_single_sgt(dev, size, sgt, dir); in dma_free_noncontiguous()
831 void *dma_vmap_noncontiguous(struct device *dev, size_t size, in dma_vmap_noncontiguous() argument
836 return iommu_dma_vmap_noncontiguous(dev, size, sgt); in dma_vmap_noncontiguous()
850 size_t size, struct sg_table *sgt) in dma_mmap_noncontiguous() argument
853 return iommu_dma_mmap_noncontiguous(dev, vma, size, sgt); in dma_mmap_noncontiguous()
854 return dma_mmap_pages(dev, vma, size, sg_page(sgt->sgl)); in dma_mmap_noncontiguous()
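
The noncontiguous API above allocates a scatter-gather-backed buffer (a single-entry sg_table when no IOMMU path is available, as alloc_single_sgt()/free_single_sgt() show), which can then be vmap'ed into the kernel or mmap'ed to userspace. A sketch of the allocate/vmap/free lifecycle:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int my_noncontig_demo(struct device *dev, size_t size)
	{
		struct sg_table *sgt;
		void *vaddr;

		sgt = dma_alloc_noncontiguous(dev, size, DMA_BIDIRECTIONAL,
					      GFP_KERNEL, 0);
		if (!sgt)
			return -ENOMEM;

		vaddr = dma_vmap_noncontiguous(dev, size, sgt);
		if (!vaddr) {
			dma_free_noncontiguous(dev, size, sgt, DMA_BIDIRECTIONAL);
			return -ENOMEM;
		}

		/* ... use the contiguous kernel mapping at vaddr ... */

		dma_vunmap_noncontiguous(dev, vaddr);
		dma_free_noncontiguous(dev, size, sgt, DMA_BIDIRECTIONAL);
		return 0;
	}
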
965 size_t size = SIZE_MAX; in dma_max_mapping_size() local
968 size = dma_direct_max_mapping_size(dev); in dma_max_mapping_size()
970 size = iommu_dma_max_mapping_size(dev); in dma_max_mapping_size()
972 size = ops->max_mapping_size(dev); in dma_max_mapping_size()
974 return size; in dma_max_mapping_size()
981 size_t size = SIZE_MAX; in dma_opt_mapping_size() local
984 size = iommu_dma_opt_mapping_size(); in dma_opt_mapping_size()
986 size = ops->opt_mapping_size(); in dma_opt_mapping_size()
988 return min(dma_max_mapping_size(dev), size); in dma_opt_mapping_size()
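
dma_max_mapping_size() reports the hard per-mapping limit (for example when swiotlb bounce buffering is in play), while dma_opt_mapping_size() is the throughput-friendly split point, already clamped to the maximum on line 988 above. A hypothetical helper that sizes I/O requests accordingly:

	#include <linux/dma-mapping.h>
	#include <linux/minmax.h>

	static size_t my_clamp_len(struct device *dev, size_t len)
	{
		/* dma_opt_mapping_size() never exceeds
		 * dma_max_mapping_size(), per line 988 above. */
		return min(len, dma_opt_mapping_size(dev));
	}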