/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2006  Ralf Baechle <ralf@linux-mips.org>
 *
 */
#ifndef __ASM_MACH_GENERIC_DMA_COHERENCE_H
#define __ASM_MACH_GENERIC_DMA_COHERENCE_H

struct device;

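/*
 * Map a kernel virtual address for DMA.  On this generic platform a
 * DMA address is simply the buffer's physical address.
 */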
static inline dma_addr_t plat_map_dma_mem(struct device *dev, void *addr,
	size_t size)
{
	return virt_to_phys(addr);
}

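/* Map a struct page for DMA; again just the page's physical address. */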
static inline dma_addr_t plat_map_dma_mem_page(struct device *dev,
	struct page *page)
{
	return page_to_phys(page);
}

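/* Convert a DMA address back to a physical address (identity mapping). */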
static inline unsigned long plat_dma_addr_to_phys(struct device *dev,
	dma_addr_t dma_addr)
{
	return dma_addr;
}

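/* Nothing to undo for the identity mapping above. */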
static inline void plat_unmap_dma_mem(struct device *dev, dma_addr_t dma_addr,
	size_t size, enum dma_data_direction direction)
{
}

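/* Report whether DMA with the given mask can be supported. */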
static inline int plat_dma_supported(struct device *dev, u64 mask)
{
	/*
	 * We fall back to GFP_DMA when the mask isn't all 1s,
	 * so we can't guarantee allocations that must be
	 * within a tighter range than GFP_DMA.
	 */
	if (mask < DMA_BIT_MASK(24))
		return 0;

	return 1;
}

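/*
 * Report whether DMA to this device is cache-coherent, either per
 * device (CONFIG_DMA_PERDEV_COHERENT) or from the global coherentio
 * setting.
 */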
static inline int plat_device_is_coherent(struct device *dev)
{
#ifdef CONFIG_DMA_PERDEV_COHERENT
	return dev->archdata.dma_coherent;
#else
	switch (coherentio) {
	default:
	case IO_COHERENCE_DEFAULT:
		return hw_coherentio;
	case IO_COHERENCE_ENABLED:
		return 1;
	case IO_COHERENCE_DISABLED:
		return 0;
	}
#endif
}

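/*
 * Default post-DMA hook; platforms needing extra cache maintenance
 * supply their own plat_post_dma_flush() instead.
 */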
#ifndef plat_post_dma_flush
static inline void plat_post_dma_flush(struct device *dev)
{
}
#endif

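/* Trivial physical <-> DMA address conversions for the swiotlb case. */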
#ifdef CONFIG_SWIOTLB
static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return paddr;
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	return daddr;
}
#endif

#endif /* __ASM_MACH_GENERIC_DMA_COHERENCE_H */