Lines matching refs: device in include/linux/dma-map-ops.h (this header is for implementations of dma_map_ops and related code, not for drivers that merely use the DMA API)
18 void *(*alloc)(struct device *dev, size_t size,
21 void (*free)(struct device *dev, size_t size, void *vaddr,
23 struct page *(*alloc_pages_op)(struct device *dev, size_t size,
26 void (*free_pages)(struct device *dev, size_t size, struct page *vaddr,
28 int (*mmap)(struct device *, struct vm_area_struct *,
31 int (*get_sgtable)(struct device *dev, struct sg_table *sgt,
35 dma_addr_t (*map_page)(struct device *dev, struct page *page,
38 void (*unmap_page)(struct device *dev, dma_addr_t dma_handle,
46 int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents,
48 void (*unmap_sg)(struct device *dev, struct scatterlist *sg, int nents,
50 dma_addr_t (*map_resource)(struct device *dev, phys_addr_t phys_addr,
53 void (*unmap_resource)(struct device *dev, dma_addr_t dma_handle,
56 void (*sync_single_for_cpu)(struct device *dev, dma_addr_t dma_handle,
58 void (*sync_single_for_device)(struct device *dev,
61 void (*sync_sg_for_cpu)(struct device *dev, struct scatterlist *sg,
63 void (*sync_sg_for_device)(struct device *dev, struct scatterlist *sg,
65 void (*cache_sync)(struct device *dev, void *vaddr, size_t size,
67 int (*dma_supported)(struct device *dev, u64 mask);
68 u64 (*get_required_mask)(struct device *dev);
69 size_t (*max_mapping_size)(struct device *dev);
71 unsigned long (*get_merge_boundary)(struct device *dev);
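The callbacks above make up struct dma_map_ops, the per-device dispatch table behind dma_map_page(), dma_alloc_attrs() and friends. A minimal sketch of a custom table follows; the whole "remap_bus" bus and its identity mapping are hypothetical, and every other callback is deliberately left NULL.

#include <linux/dma-map-ops.h>

/* Hypothetical bus whose device-visible addresses equal CPU physical ones. */
static dma_addr_t remap_bus_map_page(struct device *dev, struct page *page,
		unsigned long offset, size_t size,
		enum dma_data_direction dir, unsigned long attrs)
{
	return page_to_phys(page) + offset;	/* assumption: identity map */
}

static void remap_bus_unmap_page(struct device *dev, dma_addr_t dma_handle,
		size_t size, enum dma_data_direction dir, unsigned long attrs)
{
	/* identity mapping: nothing to tear down */
}

static const struct dma_map_ops remap_bus_dma_ops = {
	.map_page	= remap_bus_map_page,
	.unmap_page	= remap_bus_unmap_page,
};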
82 static inline const struct dma_map_ops *get_dma_ops(struct device *dev) in get_dma_ops()
89 static inline void set_dma_ops(struct device *dev, in set_dma_ops()
95 static inline const struct dma_map_ops *get_dma_ops(struct device *dev) in get_dma_ops()
99 static inline void set_dma_ops(struct device *dev, in set_dma_ops()
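set_dma_ops() installs such a table on one device (bus or IOMMU setup code is the usual caller), and get_dma_ops() is what the mapping core consults on every call, falling back to the dma-direct path when nothing is installed. A usage sketch, reusing the hypothetical table from above:

/* Sketch: hook the table up while devices join the hypothetical bus. */
static int remap_bus_device_add(struct device *dev)
{
	set_dma_ops(dev, &remap_bus_dma_ops);
	return 0;
}

static void remap_bus_device_del(struct device *dev)
{
	set_dma_ops(dev, NULL);		/* revert to dma-direct */
}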
108 static inline struct cma *dev_get_cma_area(struct device *dev) in dev_get_cma_area()
119 struct page *dma_alloc_from_contiguous(struct device *dev, size_t count,
121 bool dma_release_from_contiguous(struct device *dev, struct page *pages,
123 struct page *dma_alloc_contiguous(struct device *dev, size_t size, gfp_t gfp);
124 void dma_free_contiguous(struct device *dev, struct page *page, size_t size);
128 static inline struct cma *dev_get_cma_area(struct device *dev) in dev_get_cma_area()
141 static inline struct page *dma_alloc_from_contiguous(struct device *dev, in dma_alloc_from_contiguous()
146 static inline bool dma_release_from_contiguous(struct device *dev, in dma_release_from_contiguous()
152 static inline struct page *dma_alloc_contiguous(struct device *dev, size_t size, in dma_alloc_contiguous()
157 static inline void dma_free_contiguous(struct device *dev, struct page *page, in dma_free_contiguous()
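The contiguous helpers above sit on top of CMA: dev_get_cma_area() resolves the per-device area (or the global one), and dma_alloc_contiguous()/dma_free_contiguous() hand whole struct page runs back and forth, falling back to the normal page allocator when no CMA area applies. Note the pages come back unmapped. A sketch, where the example_* names are placeholders:

#include <linux/dma-map-ops.h>
#include <linux/gfp.h>

static struct page *example_alloc_cma(struct device *dev, size_t size)
{
	if (!dev_get_cma_area(dev))
		dev_dbg(dev, "no CMA area, page allocator fallback\n");
	return dma_alloc_contiguous(dev, size, GFP_KERNEL);
}

static void example_free_cma(struct device *dev, struct page *page,
		size_t size)
{
	dma_free_contiguous(dev, page, size);
}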
168 int dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
170 void dma_release_coherent_memory(struct device *dev);
171 int dma_alloc_from_dev_coherent(struct device *dev, ssize_t size,
173 int dma_release_from_dev_coherent(struct device *dev, int order, void *vaddr);
174 int dma_mmap_from_dev_coherent(struct device *dev, struct vm_area_struct *vma,
177 static inline int dma_declare_coherent_memory(struct device *dev, in dma_declare_coherent_memory()
186 static inline void dma_release_coherent_memory(struct device *dev) { } in dma_release_coherent_memory()
190 void *dma_alloc_from_global_coherent(struct device *dev, ssize_t size,
197 static inline void *dma_alloc_from_global_coherent(struct device *dev, in dma_alloc_from_global_coherent()
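dma_declare_coherent_memory() dedicates a fixed physical window (device-local SRAM, for instance) to one device; dma_alloc_from_dev_coherent() then satisfies that device's coherent allocations from the window before any other allocator is tried, and the global-coherent variants do the same for a shared pool. A probe-time sketch in which every address is made up:

#include <linux/dma-map-ops.h>
#include <linux/platform_device.h>
#include <linux/sizes.h>

static int example_probe(struct platform_device *pdev)
{
	/* 64 KiB of on-chip SRAM; both addresses are placeholders */
	return dma_declare_coherent_memory(&pdev->dev,
					   0x90000000,	/* CPU physical */
					   0x90000000,	/* device view */
					   SZ_64K);
}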
213 int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
216 int dma_common_mmap(struct device *dev, struct vm_area_struct *vma,
219 struct page *dma_common_alloc_pages(struct device *dev, size_t size,
221 void dma_common_free_pages(struct device *dev, size_t size, struct page *vaddr,
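The dma_common_*() helpers implement the generic parts of mmap, get_sgtable and page allocation; their signatures match the corresponding dma_map_ops members on purpose, so an ops table can delegate to them directly. The hypothetical table from the first sketch, filled out that way:

static const struct dma_map_ops remap_bus_dma_ops = {
	.map_page	= remap_bus_map_page,
	.unmap_page	= remap_bus_unmap_page,
	/* generic behavior is fine for these on the hypothetical bus */
	.mmap		= dma_common_mmap,
	.get_sgtable	= dma_common_get_sgtable,
	.alloc_pages_op	= dma_common_alloc_pages,
	.free_pages	= dma_common_free_pages,
};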
231 struct page *dma_alloc_from_pool(struct device *dev, size_t size,
233 bool (*phys_addr_ok)(struct device *, phys_addr_t, size_t));
234 bool dma_free_from_pool(struct device *dev, void *start, size_t size);
236 int dma_direct_set_offset(struct device *dev, phys_addr_t cpu_start,
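dma_alloc_from_pool()/dma_free_from_pool() serve atomic allocations from the pre-populated atomic pools, while dma_direct_set_offset() records a constant CPU-to-device address offset for devices that use dma-direct without an IOMMU. A sketch of the offset case, with made-up addresses:

#include <linux/dma-map-ops.h>
#include <linux/sizes.h>

/* Hypothetical platform: the device sees the first 1 GiB of RAM
 * shifted by +0x10000000.
 */
static int example_set_bus_offset(struct device *dev)
{
	return dma_direct_set_offset(dev,
				     0x40000000,	/* cpu_start (made up) */
				     0x50000000,	/* dma_start (made up) */
				     SZ_1G);
}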
243 static inline bool dev_is_dma_coherent(struct device *dev) in dev_is_dma_coherent()
250 static inline bool dev_is_dma_coherent(struct device *dev) in dev_is_dma_coherent()
256 static inline void dma_reset_need_sync(struct device *dev) in dma_reset_need_sync()
268 static inline bool dma_kmalloc_safe(struct device *dev, in dma_kmalloc_safe()
319 static inline bool dma_kmalloc_needs_bounce(struct device *dev, size_t size, in dma_kmalloc_needs_bounce()
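dev_is_dma_coherent() reports whether the device snoops CPU caches, and the dma_kmalloc_*() predicates let the mapping core decide whether a kmalloc() buffer can be streaming-mapped in place or must be bounced, because on a non-coherent device a small buffer may share a cache line with unrelated data. An illustrative and deliberately redundant sketch of that decision:

static bool example_needs_bounce(struct device *dev, size_t size,
		enum dma_data_direction dir)
{
	if (dev_is_dma_coherent(dev))
		return false;	/* snooping device: shared lines are fine */
	/* small kmalloc buffers may be under the cache-line granule */
	return dma_kmalloc_needs_bounce(dev, size, dir);
}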
325 void *arch_dma_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle,
327 void arch_dma_free(struct device *dev, size_t size, void *cpu_addr,
331 void arch_dma_set_mask(struct device *dev, u64 mask);
347 pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, unsigned long attrs);
349 static inline pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, in dma_pgprot()
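arch_dma_alloc()/arch_dma_free() and arch_dma_set_mask() are the hooks a non-coherent architecture supplies behind the direct allocator, and dma_pgprot() derives the page protection (often uncached) for mapping DMA memory into userspace. A sketch of the dma_pgprot() call-site pattern; real code such as dma_common_mmap() also validates vm_pgoff and the size first:

#include <linux/dma-map-ops.h>
#include <linux/mm.h>

static int example_mmap(struct device *dev, struct vm_area_struct *vma,
		void *cpu_addr, dma_addr_t dma_addr, size_t size,
		unsigned long attrs)
{
	vma->vm_page_prot = dma_pgprot(dev, vma->vm_page_prot, attrs);
	return remap_pfn_range(vma, vma->vm_start,
			page_to_pfn(virt_to_page(cpu_addr)) + vma->vm_pgoff,
			vma->vm_end - vma->vm_start, vma->vm_page_prot);
}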
404 bool arch_dma_map_page_direct(struct device *dev, phys_addr_t addr);
405 bool arch_dma_unmap_page_direct(struct device *dev, dma_addr_t dma_handle);
406 bool arch_dma_map_sg_direct(struct device *dev, struct scatterlist *sg,
408 bool arch_dma_unmap_sg_direct(struct device *dev, struct scatterlist *sg,
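The arch_dma_{map,unmap}_{page,sg}_direct() predicates let an architecture claim that one particular mapping may bypass the installed ops and take the dma-direct path anyway (powerpc uses this for its direct-mapping window). Roughly the shape of the dispatch in kernel/dma/mapping.c, as an illustrative sketch; note that dma_direct_map_page() lives in the kernel/dma internals, not in this header:

static dma_addr_t example_dispatch(struct device *dev, struct page *page,
		unsigned long offset, size_t size,
		enum dma_data_direction dir, unsigned long attrs)
{
	const struct dma_map_ops *ops = get_dma_ops(dev);

	if (!ops ||
	    arch_dma_map_page_direct(dev, page_to_phys(page) + offset + size))
		return dma_direct_map_page(dev, page, offset, size, dir, attrs);
	return ops->map_page(dev, page, offset, size, dir, attrs);
}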
418 void arch_setup_dma_ops(struct device *dev, bool coherent);
420 static inline void arch_setup_dma_ops(struct device *dev, bool coherent) in arch_setup_dma_ops()
426 void arch_teardown_dma_ops(struct device *dev);
428 static inline void arch_teardown_dma_ops(struct device *dev) in arch_teardown_dma_ops()
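arch_setup_dma_ops() is called once per device by the firmware glue (OF/ACPI) after it has parsed the device's coherency, and arch_teardown_dma_ops() reverses it on release. A sketch of the calling pattern; example_fw_is_coherent() is a hypothetical stand-in for that firmware lookup:

static bool example_fw_is_coherent(struct device *dev)
{
	return false;	/* assumption: treat the device as non-coherent */
}

static void example_fw_dma_configure(struct device *dev)
{
	arch_setup_dma_ops(dev, example_fw_is_coherent(dev));
}

static void example_fw_dma_deconfigure(struct device *dev)
{
	arch_teardown_dma_ops(dev);
}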
435 void debug_dma_dump_mappings(struct device *dev);
440 static inline void debug_dma_dump_mappings(struct device *dev) in debug_dma_dump_mappings()
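debug_dma_dump_mappings() prints every mapping dma-debug is tracking for a device (all devices when dev is NULL) and compiles away to the stub listed above when CONFIG_DMA_API_DEBUG is off. A sketch of calling it from an error path:

static void example_report_dma_fault(struct device *dev)
{
	dev_err(dev, "DMA fault, dumping outstanding mappings\n");
	debug_dma_dump_mappings(dev);	/* stub when dma-debug is off */
}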