Lines Matching refs:dev
29 static void dmam_release(struct device *dev, void *res) in dmam_release() argument
33 dma_free_attrs(dev, this->size, this->vaddr, this->dma_handle, in dmam_release()
37 static int dmam_match(struct device *dev, void *res, void *match_data) in dmam_match() argument
58 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr, in dmam_free_coherent() argument
63 dma_free_coherent(dev, size, vaddr, dma_handle); in dmam_free_coherent()
64 WARN_ON(devres_destroy(dev, dmam_release, dmam_match, &match_data)); in dmam_free_coherent()
82 void *dmam_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dmam_alloc_attrs() argument
92 vaddr = dma_alloc_attrs(dev, size, dma_handle, gfp, attrs); in dmam_alloc_attrs()
103 devres_add(dev, dr); in dmam_alloc_attrs()
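The dmam_* entries above are the managed (devres) wrappers: dmam_release() and dmam_match() are the devres callbacks, and buffers obtained through dmam_alloc_attrs() are freed automatically when the driver detaches. A minimal, hypothetical probe sketch (function name and size are illustrative, not from this file):

/* Managed coherent buffer: released automatically on driver unbind. */
static int my_probe(struct device *dev)		/* hypothetical driver */
{
	dma_addr_t dma;
	void *buf;

	buf = dmam_alloc_attrs(dev, SZ_4K, &dma, GFP_KERNEL, 0);
	if (!buf)
		return -ENOMEM;
	/* dmam_free_coherent() is only needed for an early, explicit free. */
	return 0;
}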
109 static bool dma_go_direct(struct device *dev, dma_addr_t mask, in dma_go_direct() argument
115 if (dev->dma_ops_bypass) in dma_go_direct()
116 return min_not_zero(mask, dev->bus_dma_limit) >= in dma_go_direct()
117 dma_direct_get_required_mask(dev); in dma_go_direct()
128 static inline bool dma_alloc_direct(struct device *dev, in dma_alloc_direct() argument
131 return dma_go_direct(dev, dev->coherent_dma_mask, ops); in dma_alloc_direct()
134 static inline bool dma_map_direct(struct device *dev, in dma_map_direct() argument
137 return dma_go_direct(dev, *dev->dma_mask, ops); in dma_map_direct()
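dma_go_direct() is the internal check that decides whether to bypass a device's dma_map_ops and use the dma-direct path; dma_alloc_direct() feeds it the coherent mask, dma_map_direct() the streaming mask. The bypass branch visible above boils down to (a paraphrase, not code from the file):

	bypass = dev->dma_ops_bypass &&
		 min_not_zero(mask, dev->bus_dma_limit) >=
			dma_direct_get_required_mask(dev);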
140 dma_addr_t dma_map_page_attrs(struct device *dev, struct page *page, in dma_map_page_attrs() argument
144 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page_attrs()
149 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_page_attrs()
152 if (dma_map_direct(dev, ops)) in dma_map_page_attrs()
153 addr = dma_direct_map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
155 addr = ops->map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
156 debug_dma_map_page(dev, page, offset, size, dir, addr); in dma_map_page_attrs()
162 void dma_unmap_page_attrs(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_page_attrs() argument
165 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page_attrs()
168 if (dma_map_direct(dev, ops)) in dma_unmap_page_attrs()
169 dma_direct_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
171 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
172 debug_dma_unmap_page(dev, addr, size, dir); in dma_unmap_page_attrs()
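dma_map_page_attrs()/dma_unmap_page_attrs() are the streaming single-buffer entry points (dma_map_single() is a wrapper around them). A minimal sketch of the usual call pattern; the page and direction are illustrative:

dma_addr_t addr;

addr = dma_map_page_attrs(dev, page, 0, PAGE_SIZE, DMA_TO_DEVICE, 0);
if (dma_mapping_error(dev, addr))
	return -ENOMEM;
/* ... the device reads the buffer ... */
dma_unmap_page_attrs(dev, addr, PAGE_SIZE, DMA_TO_DEVICE, 0);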
180 int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg_attrs() argument
183 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_sg_attrs()
188 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_sg_attrs()
191 if (dma_map_direct(dev, ops)) in dma_map_sg_attrs()
192 ents = dma_direct_map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
194 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
196 debug_dma_map_sg(dev, sg, nents, ents, dir); in dma_map_sg_attrs()
202 void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
206 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs()
209 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
210 if (dma_map_direct(dev, ops)) in dma_unmap_sg_attrs()
211 dma_direct_unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
213 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
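dma_map_sg_attrs() returns the number of DMA segments actually mapped, which can be fewer than nents when an IOMMU coalesces entries, and 0 on failure; unmapping must still pass the original nents. A hedged sketch, assuming sgt is a struct sg_table the caller already filled:

int ents;

ents = dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, DMA_BIDIRECTIONAL, 0);
if (!ents)
	return -EIO;
/* ... program the hardware with 'ents' segments ... */
dma_unmap_sg_attrs(dev, sgt->sgl, sgt->orig_nents, DMA_BIDIRECTIONAL, 0);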
217 dma_addr_t dma_map_resource(struct device *dev, phys_addr_t phys_addr, in dma_map_resource() argument
220 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_resource()
225 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_resource()
232 if (dma_map_direct(dev, ops)) in dma_map_resource()
233 addr = dma_direct_map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
235 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
237 debug_dma_map_resource(dev, phys_addr, size, dir, addr); in dma_map_resource()
242 void dma_unmap_resource(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_resource() argument
245 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_resource()
248 if (!dma_map_direct(dev, ops) && ops->unmap_resource) in dma_unmap_resource()
249 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
250 debug_dma_unmap_resource(dev, addr, size, dir); in dma_unmap_resource()
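dma_map_resource() maps a physical address that is not RAM (for example a peer device's MMIO window or a DMA engine FIFO) so another device can target it. A hypothetical sketch, with res standing in for a struct resource the caller already owns:

dma_addr_t dma;

dma = dma_map_resource(dev, res->start, resource_size(res),
		       DMA_BIDIRECTIONAL, 0);
if (dma_mapping_error(dev, dma))
	return -EIO;
/* ... */
dma_unmap_resource(dev, dma, resource_size(res), DMA_BIDIRECTIONAL, 0);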
254 void dma_sync_single_for_cpu(struct device *dev, dma_addr_t addr, size_t size, in dma_sync_single_for_cpu() argument
257 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_cpu()
260 if (dma_map_direct(dev, ops)) in dma_sync_single_for_cpu()
261 dma_direct_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
263 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
264 debug_dma_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
268 void dma_sync_single_for_device(struct device *dev, dma_addr_t addr, in dma_sync_single_for_device() argument
271 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_device()
274 if (dma_map_direct(dev, ops)) in dma_sync_single_for_device()
275 dma_direct_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
277 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
278 debug_dma_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
282 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
285 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_cpu()
288 if (dma_map_direct(dev, ops)) in dma_sync_sg_for_cpu()
289 dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
291 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
292 debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
296 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_device() argument
299 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_device()
302 if (dma_map_direct(dev, ops)) in dma_sync_sg_for_device()
303 dma_direct_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
305 ops->sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
306 debug_dma_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
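The dma_sync_* entries hand ownership of a streaming mapping back and forth between CPU and device without unmapping it. A minimal sketch for a buffer the device fills and the CPU then inspects; addr, size and buf are assumed to come from an earlier dma_map_single():

/* Device finished writing; claim the buffer for the CPU. */
dma_sync_single_for_cpu(dev, addr, size, DMA_FROM_DEVICE);
process_data(buf, size);		/* hypothetical helper */
/* Hand it back to the device for the next transfer. */
dma_sync_single_for_device(dev, addr, size, DMA_FROM_DEVICE);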
321 int dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, in dma_get_sgtable_attrs() argument
325 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs()
327 if (dma_alloc_direct(dev, ops)) in dma_get_sgtable_attrs()
328 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
332 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
341 pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, unsigned long attrs) in dma_pgprot() argument
343 if (force_dma_unencrypted(dev)) in dma_pgprot()
345 if (dev_is_dma_coherent(dev)) in dma_pgprot()
365 bool dma_can_mmap(struct device *dev) in dma_can_mmap() argument
367 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_can_mmap()
369 if (dma_alloc_direct(dev, ops)) in dma_can_mmap()
370 return dma_direct_can_mmap(dev); in dma_can_mmap()
388 int dma_mmap_attrs(struct device *dev, struct vm_area_struct *vma, in dma_mmap_attrs() argument
392 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs()
394 if (dma_alloc_direct(dev, ops)) in dma_mmap_attrs()
395 return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
399 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
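This group re-exports coherent buffers: dma_get_sgtable_attrs() as a scatterlist, dma_mmap_attrs() (usually via the dma_mmap_coherent() wrapper) into a user mapping, with dma_pgprot() picking the page protection and dma_can_mmap() reporting whether mmap is possible at all. A hedged ->mmap sketch; struct my_ctx and its fields are hypothetical driver state holding the result of an earlier dma_alloc_coherent():

static int my_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct my_ctx *ctx = file->private_data;	/* hypothetical */

	if (!dma_can_mmap(ctx->dev))
		return -ENXIO;
	return dma_mmap_coherent(ctx->dev, vma, ctx->buf, ctx->dma_handle,
				 ctx->buf_size);
}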
403 u64 dma_get_required_mask(struct device *dev) in dma_get_required_mask() argument
405 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_required_mask()
407 if (dma_alloc_direct(dev, ops)) in dma_get_required_mask()
408 return dma_direct_get_required_mask(dev); in dma_get_required_mask()
410 return ops->get_required_mask(dev); in dma_get_required_mask()
424 void *dma_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_attrs() argument
427 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_attrs()
430 WARN_ON_ONCE(!dev->coherent_dma_mask); in dma_alloc_attrs()
432 if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr)) in dma_alloc_attrs()
438 if (dma_alloc_direct(dev, ops)) in dma_alloc_attrs()
439 cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
441 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
445 debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr); in dma_alloc_attrs()
450 void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, in dma_free_attrs() argument
453 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_attrs()
455 if (dma_release_from_dev_coherent(dev, get_order(size), cpu_addr)) in dma_free_attrs()
469 debug_dma_free_coherent(dev, size, cpu_addr, dma_handle); in dma_free_attrs()
470 if (dma_alloc_direct(dev, ops)) in dma_free_attrs()
471 dma_direct_free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
473 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
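dma_alloc_attrs()/dma_free_attrs() are the core of the coherent allocator; dma_alloc_coherent()/dma_free_coherent() are the attrs == 0 wrappers most drivers call. Typical pattern, with an illustrative size:

dma_addr_t dma;
void *cpu;

cpu = dma_alloc_coherent(dev, SZ_64K, &dma, GFP_KERNEL);
if (!cpu)
	return -ENOMEM;
/* hand 'dma' to the hardware, touch the memory through 'cpu' */
dma_free_coherent(dev, SZ_64K, cpu, dma);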
477 struct page *dma_alloc_pages(struct device *dev, size_t size, in dma_alloc_pages() argument
480 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_pages()
483 if (WARN_ON_ONCE(!dev->coherent_dma_mask)) in dma_alloc_pages()
489 if (dma_alloc_direct(dev, ops)) in dma_alloc_pages()
490 page = dma_direct_alloc_pages(dev, size, dma_handle, dir, gfp); in dma_alloc_pages()
492 page = ops->alloc_pages(dev, size, dma_handle, dir, gfp); in dma_alloc_pages()
496 debug_dma_map_page(dev, page, 0, size, dir, *dma_handle); in dma_alloc_pages()
502 void dma_free_pages(struct device *dev, size_t size, struct page *page, in dma_free_pages() argument
505 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_pages()
508 debug_dma_unmap_page(dev, dma_handle, size, dir); in dma_free_pages()
510 if (dma_alloc_direct(dev, ops)) in dma_free_pages()
511 dma_direct_free_pages(dev, size, page, dma_handle, dir); in dma_free_pages()
513 ops->free_pages(dev, size, page, dma_handle, dir); in dma_free_pages()
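dma_alloc_pages() returns struct page backed memory that is already mapped for the given direction but is not guaranteed to be coherent, so CPU accesses may need the dma_sync_* calls shown earlier. A minimal sketch:

dma_addr_t dma;
struct page *page;

page = dma_alloc_pages(dev, PAGE_SIZE, &dma, DMA_TO_DEVICE, GFP_KERNEL);
if (!page)
	return -ENOMEM;
/* ... */
dma_free_pages(dev, PAGE_SIZE, page, dma, DMA_TO_DEVICE);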
517 void *dma_alloc_noncoherent(struct device *dev, size_t size, in dma_alloc_noncoherent() argument
520 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_noncoherent()
526 page = dma_alloc_pages(dev, size, dma_handle, dir, gfp); in dma_alloc_noncoherent()
533 vaddr = ops->alloc_noncoherent(dev, size, dma_handle, dir, gfp); in dma_alloc_noncoherent()
535 debug_dma_map_page(dev, virt_to_page(vaddr), 0, size, dir, in dma_alloc_noncoherent()
541 void dma_free_noncoherent(struct device *dev, size_t size, void *vaddr, in dma_free_noncoherent() argument
544 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_noncoherent()
547 dma_free_pages(dev, size, virt_to_page(vaddr), dma_handle, dir); in dma_free_noncoherent()
552 debug_dma_unmap_page(dev, dma_handle, size, dir); in dma_free_noncoherent()
553 ops->free_noncoherent(dev, size, vaddr, dma_handle, dir); in dma_free_noncoherent()
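dma_alloc_noncoherent() is the kernel-virtual-address variant of the same idea; on platforms without hardware coherence the caller brackets CPU access with explicit syncs. Sketch, with an illustrative size and direction:

dma_addr_t dma;
void *vaddr;

vaddr = dma_alloc_noncoherent(dev, SZ_16K, &dma, DMA_FROM_DEVICE, GFP_KERNEL);
if (!vaddr)
	return -ENOMEM;
dma_sync_single_for_cpu(dev, dma, SZ_16K, DMA_FROM_DEVICE);
/* ... read what the device wrote ... */
dma_free_noncoherent(dev, SZ_16K, vaddr, dma, DMA_FROM_DEVICE);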
557 int dma_supported(struct device *dev, u64 mask) in dma_supported() argument
559 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_supported()
566 return dma_direct_supported(dev, mask); in dma_supported()
569 return ops->dma_supported(dev, mask); in dma_supported()
574 void arch_dma_set_mask(struct device *dev, u64 mask);
576 #define arch_dma_set_mask(dev, mask) do { } while (0) argument
579 int dma_set_mask(struct device *dev, u64 mask) in dma_set_mask() argument
587 if (!dev->dma_mask || !dma_supported(dev, mask)) in dma_set_mask()
590 arch_dma_set_mask(dev, mask); in dma_set_mask()
591 *dev->dma_mask = mask; in dma_set_mask()
597 int dma_set_coherent_mask(struct device *dev, u64 mask) in dma_set_coherent_mask() argument
605 if (!dma_supported(dev, mask)) in dma_set_coherent_mask()
608 dev->coherent_dma_mask = mask; in dma_set_coherent_mask()
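dma_set_mask() and dma_set_coherent_mask() both validate the mask through dma_supported() before storing it. Drivers usually set both at probe time via the dma_set_mask_and_coherent() wrapper and, if a wide mask is rejected, retry with a narrower one:

int ret;

ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(64));
if (ret)
	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
if (ret)
	return ret;	/* no usable DMA addressing */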
614 size_t dma_max_mapping_size(struct device *dev) in dma_max_mapping_size() argument
616 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_max_mapping_size()
619 if (dma_map_direct(dev, ops)) in dma_max_mapping_size()
620 size = dma_direct_max_mapping_size(dev); in dma_max_mapping_size()
622 size = ops->max_mapping_size(dev); in dma_max_mapping_size()
628 bool dma_need_sync(struct device *dev, dma_addr_t dma_addr) in dma_need_sync() argument
630 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_need_sync()
632 if (dma_map_direct(dev, ops)) in dma_need_sync()
633 return dma_direct_need_sync(dev, dma_addr); in dma_need_sync()
638 unsigned long dma_get_merge_boundary(struct device *dev) in dma_get_merge_boundary() argument
640 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_merge_boundary()
645 return ops->get_merge_boundary(dev); in dma_get_merge_boundary()
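The final entries are capability queries: dma_max_mapping_size() bounds a single streaming mapping (bounce buffering can shrink it), dma_need_sync() tells fast paths whether dma_sync_* calls are actually required for a given handle, and dma_get_merge_boundary() reports the IOMMU merge boundary (0 means segments cannot be merged). A query sketch, with dma_handle assumed to come from a prior mapping:

size_t max_seg = dma_max_mapping_size(dev);
bool must_sync = dma_need_sync(dev, dma_handle);
unsigned long merge = dma_get_merge_boundary(dev);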