Lines Matching refs:dev

31 static void dmam_release(struct device *dev, void *res)  in dmam_release()  argument
35 dma_free_attrs(dev, this->size, this->vaddr, this->dma_handle, in dmam_release()
39 static int dmam_match(struct device *dev, void *res, void *match_data) in dmam_match() argument
60 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr, in dmam_free_coherent() argument
65 dma_free_coherent(dev, size, vaddr, dma_handle); in dmam_free_coherent()
66 WARN_ON(devres_destroy(dev, dmam_release, dmam_match, &match_data)); in dmam_free_coherent()
84 void *dmam_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dmam_alloc_attrs() argument
94 vaddr = dma_alloc_attrs(dev, size, dma_handle, gfp, attrs); in dmam_alloc_attrs()
105 devres_add(dev, dr); in dmam_alloc_attrs()
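
The lines above are the managed ("devres") wrappers: memory obtained through dmam_alloc_attrs() is recorded as a device resource and released automatically when the driver unbinds, with dmam_free_coherent() available for an early release. A minimal driver-side sketch, assuming a hypothetical probe function and buffer size; dmam_alloc_coherent() shown here is the attrs == 0 convenience form of dmam_alloc_attrs():

#include <linux/device.h>
#include <linux/dma-mapping.h>

#define MY_BUF_SIZE 4096        /* hypothetical buffer size */

static int my_probe(struct device *dev)
{
        dma_addr_t dma_handle;
        void *vaddr;

        /* Managed allocation: freed automatically on driver detach. */
        vaddr = dmam_alloc_coherent(dev, MY_BUF_SIZE, &dma_handle, GFP_KERNEL);
        if (!vaddr)
                return -ENOMEM;

        /* ... program dma_handle into the device ... */
        return 0;
        /* No explicit dmam_free_coherent() needed unless freeing early. */
}
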
111 static bool dma_go_direct(struct device *dev, dma_addr_t mask, in dma_go_direct() argument
117 if (dev->dma_ops_bypass) in dma_go_direct()
118 return min_not_zero(mask, dev->bus_dma_limit) >= in dma_go_direct()
119 dma_direct_get_required_mask(dev); in dma_go_direct()
130 static inline bool dma_alloc_direct(struct device *dev, in dma_alloc_direct() argument
133 return dma_go_direct(dev, dev->coherent_dma_mask, ops); in dma_alloc_direct()
136 static inline bool dma_map_direct(struct device *dev, in dma_map_direct() argument
139 return dma_go_direct(dev, *dev->dma_mask, ops); in dma_map_direct()
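
dma_go_direct() and its dma_alloc_direct()/dma_map_direct() wrappers decide whether a request bypasses the installed dma_map_ops and takes the dma-direct path: either the device has no ops at all, or dma_ops_bypass is set and the effective mask (the smaller non-zero of the device mask and bus_dma_limit) covers everything the device may need to address. A hedged paraphrase assembled from the listed lines, not the verbatim kernel function (the listing shows only the lines that reference dev, and the bypass field depends on CONFIG_DMA_OPS_BYPASS):

/* Sketch of the bypass decision, paraphrased from the lines above. */
static bool dma_go_direct_sketch(struct device *dev, u64 mask,
                                 const struct dma_map_ops *ops)
{
        if (!ops)                       /* no ops installed: always direct */
                return true;
        if (dev->dma_ops_bypass)        /* ops present, but bypass allowed */
                return min_not_zero(mask, dev->bus_dma_limit) >=
                        dma_direct_get_required_mask(dev);
        return false;                   /* otherwise dispatch to the ops */
}
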
142 dma_addr_t dma_map_page_attrs(struct device *dev, struct page *page, in dma_map_page_attrs() argument
146 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page_attrs()
151 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_page_attrs()
154 if (dma_map_direct(dev, ops) || in dma_map_page_attrs()
155 arch_dma_map_page_direct(dev, page_to_phys(page) + offset + size)) in dma_map_page_attrs()
156 addr = dma_direct_map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
158 addr = ops->map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
159 debug_dma_map_page(dev, page, offset, size, dir, addr, attrs); in dma_map_page_attrs()
165 void dma_unmap_page_attrs(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_page_attrs() argument
168 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page_attrs()
171 if (dma_map_direct(dev, ops) || in dma_unmap_page_attrs()
172 arch_dma_unmap_page_direct(dev, addr + size)) in dma_unmap_page_attrs()
173 dma_direct_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
175 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
176 debug_dma_unmap_page(dev, addr, size, dir); in dma_unmap_page_attrs()
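
dma_map_page_attrs() dispatches either to dma-direct or to ops->map_page(), and dma_unmap_page_attrs() mirrors that on teardown. From a driver's point of view the usual entry points are the attrs == 0 wrappers dma_map_page()/dma_map_single(). A minimal streaming-mapping sketch, assuming a hypothetical driver-owned buffer and transfer:

#include <linux/dma-mapping.h>

/* Map a kernel buffer for a single device-read (DMA_TO_DEVICE) transfer. */
static int my_send_buffer(struct device *dev, void *buf, size_t len)
{
        dma_addr_t addr;

        addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, addr))
                return -ENOMEM;

        /* ... hand addr/len to the hardware and wait for completion ... */

        dma_unmap_single(dev, addr, len, DMA_TO_DEVICE);
        return 0;
}
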
180 static int __dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in __dma_map_sg_attrs() argument
183 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_map_sg_attrs()
188 if (WARN_ON_ONCE(!dev->dma_mask)) in __dma_map_sg_attrs()
191 if (dma_map_direct(dev, ops) || in __dma_map_sg_attrs()
192 arch_dma_map_sg_direct(dev, sg, nents)) in __dma_map_sg_attrs()
193 ents = dma_direct_map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
195 ents = ops->map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
198 debug_dma_map_sg(dev, sg, nents, ents, dir, attrs); in __dma_map_sg_attrs()
223 unsigned int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_map_sg_attrs() argument
228 ret = __dma_map_sg_attrs(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
259 int dma_map_sgtable(struct device *dev, struct sg_table *sgt, in dma_map_sgtable() argument
264 nents = __dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir, attrs); in dma_map_sgtable()
272 void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
276 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs()
279 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
280 if (dma_map_direct(dev, ops) || in dma_unmap_sg_attrs()
281 arch_dma_unmap_sg_direct(dev, sg, nents)) in dma_unmap_sg_attrs()
282 dma_direct_unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
284 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
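
__dma_map_sg_attrs() is shared by dma_map_sg_attrs(), which returns the number of mapped entries (0 on failure), and dma_map_sgtable(), which updates the sg_table and returns 0 or a negative errno. A sketch using the sg_table interface, assuming a hypothetical page array; dma_unmap_sgtable() is the matching release:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_map_pages(struct device *dev, struct page **pages, int n)
{
        struct sg_table sgt;
        struct scatterlist *sg;
        int i, ret;

        ret = sg_alloc_table(&sgt, n, GFP_KERNEL);
        if (ret)
                return ret;

        for_each_sgtable_sg(&sgt, sg, i)
                sg_set_page(sg, pages[i], PAGE_SIZE, 0);

        ret = dma_map_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0);
        if (ret) {
                sg_free_table(&sgt);
                return ret;
        }

        /* ... hardware walks the mapped entries via for_each_sgtable_dma_sg() ... */

        dma_unmap_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0);
        sg_free_table(&sgt);
        return 0;
}
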
288 dma_addr_t dma_map_resource(struct device *dev, phys_addr_t phys_addr, in dma_map_resource() argument
291 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_resource()
296 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_resource()
299 if (dma_map_direct(dev, ops)) in dma_map_resource()
300 addr = dma_direct_map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
302 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
304 debug_dma_map_resource(dev, phys_addr, size, dir, addr, attrs); in dma_map_resource()
309 void dma_unmap_resource(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_resource() argument
312 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_resource()
315 if (!dma_map_direct(dev, ops) && ops->unmap_resource) in dma_unmap_resource()
316 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
317 debug_dma_unmap_resource(dev, addr, size, dir); in dma_unmap_resource()
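
dma_map_resource() maps a physical MMIO address (for example another device's BAR) rather than RAM, so it never touches struct page; there is no direct-path unmap hook, hence the ops-only check in dma_unmap_resource(). A hedged sketch, assuming a hypothetical peer BAR address and length:

#include <linux/dma-mapping.h>

static int my_map_peer_bar(struct device *dev, phys_addr_t bar_phys, size_t len)
{
        dma_addr_t addr;

        addr = dma_map_resource(dev, bar_phys, len, DMA_BIDIRECTIONAL, 0);
        if (dma_mapping_error(dev, addr))
                return -ENOMEM;

        /* ... program addr into the device for peer-to-peer access ... */

        dma_unmap_resource(dev, addr, len, DMA_BIDIRECTIONAL, 0);
        return 0;
}
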
321 void dma_sync_single_for_cpu(struct device *dev, dma_addr_t addr, size_t size, in dma_sync_single_for_cpu() argument
324 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_cpu()
327 if (dma_map_direct(dev, ops)) in dma_sync_single_for_cpu()
328 dma_direct_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
330 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
331 debug_dma_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
335 void dma_sync_single_for_device(struct device *dev, dma_addr_t addr, in dma_sync_single_for_device() argument
338 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_device()
341 if (dma_map_direct(dev, ops)) in dma_sync_single_for_device()
342 dma_direct_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
344 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
345 debug_dma_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
349 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
352 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_cpu()
355 if (dma_map_direct(dev, ops)) in dma_sync_sg_for_cpu()
356 dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
358 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
359 debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
363 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_device() argument
366 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_device()
369 if (dma_map_direct(dev, ops)) in dma_sync_sg_for_device()
370 dma_direct_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
372 ops->sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
373 debug_dma_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
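
The four sync helpers transfer ownership of an already-mapped streaming buffer between CPU and device without unmapping it: sync_*_for_cpu before the CPU touches the data, sync_*_for_device before handing the buffer back to the hardware. A sketch for a long-lived receive buffer, assuming a hypothetical mapping established elsewhere with dma_map_single():

#include <linux/dma-mapping.h>

/* Reuse one mapped buffer across many receive operations. */
static void my_process_rx(struct device *dev, dma_addr_t addr,
                          void *cpu_buf, size_t len)
{
        /* Give the buffer to the CPU to inspect the received data. */
        dma_sync_single_for_cpu(dev, addr, len, DMA_FROM_DEVICE);

        /* ... read cpu_buf here ... */

        /* Hand the buffer back to the device for the next transfer. */
        dma_sync_single_for_device(dev, addr, len, DMA_FROM_DEVICE);
}
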
388 int dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, in dma_get_sgtable_attrs() argument
392 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs()
394 if (dma_alloc_direct(dev, ops)) in dma_get_sgtable_attrs()
395 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
399 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
408 pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, unsigned long attrs) in dma_pgprot() argument
410 if (force_dma_unencrypted(dev)) in dma_pgprot()
412 if (dev_is_dma_coherent(dev)) in dma_pgprot()
432 bool dma_can_mmap(struct device *dev) in dma_can_mmap() argument
434 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_can_mmap()
436 if (dma_alloc_direct(dev, ops)) in dma_can_mmap()
437 return dma_direct_can_mmap(dev); in dma_can_mmap()
455 int dma_mmap_attrs(struct device *dev, struct vm_area_struct *vma, in dma_mmap_attrs() argument
459 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs()
461 if (dma_alloc_direct(dev, ops)) in dma_mmap_attrs()
462 return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
466 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
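
dma_get_sgtable_attrs(), dma_can_mmap() and dma_mmap_attrs() let a driver expose a coherent buffer to user space, with dma_pgprot() selecting page protections based on coherency and memory encryption. A sketch of an mmap handler, assuming a hypothetical driver context holding cpu_addr/dma_addr/size from an earlier dma_alloc_coherent(); dma_mmap_coherent() is the attrs == 0 wrapper around dma_mmap_attrs():

#include <linux/dma-mapping.h>
#include <linux/mm.h>

/* Hypothetical per-device state filled in at allocation time. */
struct my_ctx {
        struct device *dev;
        void *cpu_addr;
        dma_addr_t dma_addr;
        size_t size;
};

static int my_mmap(struct my_ctx *ctx, struct vm_area_struct *vma)
{
        if (!dma_can_mmap(ctx->dev))
                return -ENXIO;

        return dma_mmap_coherent(ctx->dev, vma, ctx->cpu_addr,
                                 ctx->dma_addr, ctx->size);
}
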
470 u64 dma_get_required_mask(struct device *dev) in dma_get_required_mask() argument
472 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_required_mask()
474 if (dma_alloc_direct(dev, ops)) in dma_get_required_mask()
475 return dma_direct_get_required_mask(dev); in dma_get_required_mask()
477 return ops->get_required_mask(dev); in dma_get_required_mask()
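
dma_get_required_mask() reports the narrowest mask that still covers all memory the platform may hand the device, which lets a driver pick a smaller hardware mask without forcing bounce buffering. A short hedged sketch with a hypothetical helper name:

#include <linux/dma-mapping.h>

static bool my_can_use_32bit_dma(struct device *dev)
{
        /* If everything fits below 4 GiB, a 32-bit mask costs nothing. */
        return dma_get_required_mask(dev) <= DMA_BIT_MASK(32);
}
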
491 void *dma_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_attrs() argument
494 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_attrs()
497 WARN_ON_ONCE(!dev->coherent_dma_mask); in dma_alloc_attrs()
499 if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr)) in dma_alloc_attrs()
505 if (dma_alloc_direct(dev, ops)) in dma_alloc_attrs()
506 cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
508 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
512 debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr, attrs); in dma_alloc_attrs()
517 void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, in dma_free_attrs() argument
520 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_attrs()
522 if (dma_release_from_dev_coherent(dev, get_order(size), cpu_addr)) in dma_free_attrs()
536 debug_dma_free_coherent(dev, size, cpu_addr, dma_handle); in dma_free_attrs()
537 if (dma_alloc_direct(dev, ops)) in dma_free_attrs()
538 dma_direct_free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
540 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
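
dma_alloc_attrs() first tries a per-device coherent pool, then the direct allocator or ops->alloc(); dma_free_attrs() reverses that. Drivers usually call the attrs == 0 wrappers dma_alloc_coherent()/dma_free_coherent(). A minimal sketch, assuming a hypothetical descriptor-ring size:

#include <linux/dma-mapping.h>

#define MY_RING_BYTES   4096    /* hypothetical descriptor ring size */

static int my_setup_ring(struct device *dev, void **ring, dma_addr_t *ring_dma)
{
        *ring = dma_alloc_coherent(dev, MY_RING_BYTES, ring_dma, GFP_KERNEL);
        if (!*ring)
                return -ENOMEM;
        return 0;
}

static void my_teardown_ring(struct device *dev, void *ring, dma_addr_t ring_dma)
{
        dma_free_coherent(dev, MY_RING_BYTES, ring, ring_dma);
}
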
544 static struct page *__dma_alloc_pages(struct device *dev, size_t size, in __dma_alloc_pages() argument
547 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_alloc_pages()
549 if (WARN_ON_ONCE(!dev->coherent_dma_mask)) in __dma_alloc_pages()
555 if (dma_alloc_direct(dev, ops)) in __dma_alloc_pages()
556 return dma_direct_alloc_pages(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
559 return ops->alloc_pages(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
562 struct page *dma_alloc_pages(struct device *dev, size_t size, in dma_alloc_pages() argument
565 struct page *page = __dma_alloc_pages(dev, size, dma_handle, dir, gfp); in dma_alloc_pages()
568 debug_dma_map_page(dev, page, 0, size, dir, *dma_handle, 0); in dma_alloc_pages()
573 static void __dma_free_pages(struct device *dev, size_t size, struct page *page, in __dma_free_pages() argument
576 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_free_pages()
579 if (dma_alloc_direct(dev, ops)) in __dma_free_pages()
580 dma_direct_free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
582 ops->free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
585 void dma_free_pages(struct device *dev, size_t size, struct page *page, in dma_free_pages() argument
588 debug_dma_unmap_page(dev, dma_handle, size, dir); in dma_free_pages()
589 __dma_free_pages(dev, size, page, dma_handle, dir); in dma_free_pages()
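
dma_alloc_pages() returns struct page backed memory that is already mapped for the given direction but may be non-coherent, so the streaming ownership rules apply; dma_free_pages() undoes both the mapping and the allocation. A hedged sketch, assuming a hypothetical one-page buffer used for device reads:

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/string.h>

static int my_use_dma_pages(struct device *dev)
{
        dma_addr_t dma_handle;
        struct page *page;
        void *vaddr;

        page = dma_alloc_pages(dev, PAGE_SIZE, &dma_handle,
                               DMA_TO_DEVICE, GFP_KERNEL);
        if (!page)
                return -ENOMEM;

        vaddr = page_address(page);
        memset(vaddr, 0, PAGE_SIZE);    /* fill the buffer for the device */
        dma_sync_single_for_device(dev, dma_handle, PAGE_SIZE, DMA_TO_DEVICE);

        /* ... start the transfer using dma_handle ... */

        dma_free_pages(dev, PAGE_SIZE, page, dma_handle, DMA_TO_DEVICE);
        return 0;
}
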
593 int dma_mmap_pages(struct device *dev, struct vm_area_struct *vma, in dma_mmap_pages() argument
606 static struct sg_table *alloc_single_sgt(struct device *dev, size_t size, in alloc_single_sgt() argument
617 page = __dma_alloc_pages(dev, size, &sgt->sgl->dma_address, dir, gfp); in alloc_single_sgt()
630 struct sg_table *dma_alloc_noncontiguous(struct device *dev, size_t size, in dma_alloc_noncontiguous() argument
633 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_noncontiguous()
640 sgt = ops->alloc_noncontiguous(dev, size, dir, gfp, attrs); in dma_alloc_noncontiguous()
642 sgt = alloc_single_sgt(dev, size, dir, gfp); in dma_alloc_noncontiguous()
646 debug_dma_map_sg(dev, sgt->sgl, sgt->orig_nents, 1, dir, attrs); in dma_alloc_noncontiguous()
652 static void free_single_sgt(struct device *dev, size_t size, in free_single_sgt() argument
655 __dma_free_pages(dev, size, sg_page(sgt->sgl), sgt->sgl->dma_address, in free_single_sgt()
661 void dma_free_noncontiguous(struct device *dev, size_t size, in dma_free_noncontiguous() argument
664 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_noncontiguous()
666 debug_dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir); in dma_free_noncontiguous()
668 ops->free_noncontiguous(dev, size, sgt, dir); in dma_free_noncontiguous()
670 free_single_sgt(dev, size, sgt, dir); in dma_free_noncontiguous()
674 void *dma_vmap_noncontiguous(struct device *dev, size_t size, in dma_vmap_noncontiguous() argument
677 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_vmap_noncontiguous()
686 void dma_vunmap_noncontiguous(struct device *dev, void *vaddr) in dma_vunmap_noncontiguous() argument
688 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_vunmap_noncontiguous()
695 int dma_mmap_noncontiguous(struct device *dev, struct vm_area_struct *vma, in dma_mmap_noncontiguous() argument
698 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_noncontiguous()
708 return dma_mmap_pages(dev, vma, size, sg_page(sgt->sgl)); in dma_mmap_noncontiguous()
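
dma_alloc_noncontiguous() returns an sg_table describing a (possibly scattered) allocation that is already DMA mapped; dma_vmap_noncontiguous() provides a contiguous kernel mapping when CPU access is needed, and dma_mmap_pages()/dma_mmap_noncontiguous() export it to user space. When ops->alloc_noncontiguous is absent the allocation degenerates to a single dma_alloc_pages() entry via alloc_single_sgt(). A sketch, assuming a hypothetical 64 KiB receive area:

#include <linux/dma-mapping.h>

#define MY_RX_BYTES     (64 * 1024)     /* hypothetical receive area */

static int my_use_noncontiguous(struct device *dev)
{
        struct sg_table *sgt;
        void *vaddr;

        sgt = dma_alloc_noncontiguous(dev, MY_RX_BYTES, DMA_FROM_DEVICE,
                                      GFP_KERNEL, 0);
        if (!sgt)
                return -ENOMEM;

        vaddr = dma_vmap_noncontiguous(dev, MY_RX_BYTES, sgt);
        if (!vaddr) {
                dma_free_noncontiguous(dev, MY_RX_BYTES, sgt, DMA_FROM_DEVICE);
                return -ENOMEM;
        }

        /* Device writes, then the CPU takes ownership before reading. */
        dma_sync_sgtable_for_cpu(dev, sgt, DMA_FROM_DEVICE);
        /* ... read from vaddr ... */
        dma_sync_sgtable_for_device(dev, sgt, DMA_FROM_DEVICE);

        dma_vunmap_noncontiguous(dev, vaddr);
        dma_free_noncontiguous(dev, MY_RX_BYTES, sgt, DMA_FROM_DEVICE);
        return 0;
}
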
712 int dma_supported(struct device *dev, u64 mask) in dma_supported() argument
714 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_supported()
721 return dma_direct_supported(dev, mask); in dma_supported()
724 return ops->dma_supported(dev, mask); in dma_supported()
729 void arch_dma_set_mask(struct device *dev, u64 mask);
731 #define arch_dma_set_mask(dev, mask) do { } while (0) argument
734 int dma_set_mask(struct device *dev, u64 mask) in dma_set_mask() argument
742 if (!dev->dma_mask || !dma_supported(dev, mask)) in dma_set_mask()
745 arch_dma_set_mask(dev, mask); in dma_set_mask()
746 *dev->dma_mask = mask; in dma_set_mask()
752 int dma_set_coherent_mask(struct device *dev, u64 mask) in dma_set_coherent_mask() argument
760 if (!dma_supported(dev, mask)) in dma_set_coherent_mask()
763 dev->coherent_dma_mask = mask; in dma_set_coherent_mask()
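
dma_set_mask() and dma_set_coherent_mask() validate the requested mask through dma_supported() before storing it; most drivers set both at probe time via dma_set_mask_and_coherent() and fall back to a narrower mask if the wider one is rejected. A typical probe-time sketch:

#include <linux/dma-mapping.h>

static int my_configure_dma(struct device *dev)
{
        int ret;

        /* Prefer full 64-bit addressing, fall back to 32-bit if rejected. */
        ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(64));
        if (ret)
                ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
        if (ret)
                dev_err(dev, "no usable DMA addressing mode\n");
        return ret;
}
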
769 size_t dma_max_mapping_size(struct device *dev) in dma_max_mapping_size() argument
771 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_max_mapping_size()
774 if (dma_map_direct(dev, ops)) in dma_max_mapping_size()
775 size = dma_direct_max_mapping_size(dev); in dma_max_mapping_size()
777 size = ops->max_mapping_size(dev); in dma_max_mapping_size()
783 bool dma_need_sync(struct device *dev, dma_addr_t dma_addr) in dma_need_sync() argument
785 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_need_sync()
787 if (dma_map_direct(dev, ops)) in dma_need_sync()
788 return dma_direct_need_sync(dev, dma_addr); in dma_need_sync()
793 unsigned long dma_get_merge_boundary(struct device *dev) in dma_get_merge_boundary() argument
795 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_merge_boundary()
800 return ops->get_merge_boundary(dev); in dma_get_merge_boundary()
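
The remaining helpers are read-only queries: dma_max_mapping_size() bounds a single mapping (for example when swiotlb bounce buffering limits it), dma_need_sync() tells fast paths whether dma_sync_* calls can be skipped for a given address, and dma_get_merge_boundary() reports the boundary at which an IOMMU can merge segments (0 means no merging). A short sketch querying all three, assuming a buffer already mapped elsewhere:

#include <linux/dma-mapping.h>

static void my_query_dma_limits(struct device *dev, dma_addr_t mapped_addr)
{
        size_t max_seg = dma_max_mapping_size(dev);
        unsigned long merge = dma_get_merge_boundary(dev);

        dev_info(dev, "max single mapping: %zu bytes\n", max_seg);
        dev_info(dev, "merge boundary: %#lx (0 = no merging)\n", merge);

        if (!dma_need_sync(dev, mapped_addr))
                dev_info(dev, "dma_sync_* calls can be elided for this buffer\n");
}
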