#ifndef _M68K_DMA_MAPPING_H
#define _M68K_DMA_MAPPING_H

#include <asm/cache.h>

struct scatterlist;

#ifndef CONFIG_MMU_SUN3
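/*
 * Everything below is the regular m68k implementation; the Sun3 MMU
 * configuration instead falls back to the generic "broken" stubs pulled
 * in by the #else branch at the bottom of this file.
 */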
static inline int dma_supported(struct device *dev, u64 mask)
{
	return 1;
}

static inline int dma_set_mask(struct device *dev, u64 mask)
{
	return 0;
}

static inline int dma_get_cache_alignment(void)
{
	return 1 << L1_CACHE_SHIFT;
}

static inline int dma_is_consistent(struct device *dev, dma_addr_t dma_addr)
{
	return 0;
}

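/*
 * Coherent allocations are implemented out of line; the "noncoherent"
 * variants below simply fall back to them.
 */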
extern void *dma_alloc_coherent(struct device *, size_t,
				dma_addr_t *, gfp_t);
extern void dma_free_coherent(struct device *, size_t,
			      void *, dma_addr_t);

static inline void *dma_alloc_noncoherent(struct device *dev, size_t size,
					  dma_addr_t *handle, gfp_t flag)
{
	return dma_alloc_coherent(dev, size, handle, flag);
}
static inline void dma_free_noncoherent(struct device *dev, size_t size,
					void *addr, dma_addr_t handle)
{
	dma_free_coherent(dev, size, addr, handle);
}
static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
				  enum dma_data_direction dir)
{
	/* we use coherent allocation, so not much to do here. */
}

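/*
 * Streaming mappings: the dma_map_* routines are implemented out of line,
 * while the corresponding dma_unmap_* routines are no-ops here.
 */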
extern dma_addr_t dma_map_single(struct device *, void *, size_t,
				 enum dma_data_direction);
static inline void dma_unmap_single(struct device *dev, dma_addr_t addr,
				    size_t size, enum dma_data_direction dir)
{
}

extern dma_addr_t dma_map_page(struct device *, struct page *,
			       unsigned long, size_t size,
			       enum dma_data_direction);
static inline void dma_unmap_page(struct device *dev, dma_addr_t address,
				  size_t size, enum dma_data_direction dir)
{
}

extern int dma_map_sg(struct device *, struct scatterlist *, int,
		      enum dma_data_direction);
static inline void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
				int nhwentries, enum dma_data_direction dir)
{
}

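/*
 * Syncing: only the *_for_device direction does real work; the *_for_cpu
 * helpers are no-ops, and the ranged variants simply sync the buffer from
 * its start up to offset + size.
 */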
extern void dma_sync_single_for_device(struct device *, dma_addr_t, size_t,
				       enum dma_data_direction);
extern void dma_sync_sg_for_device(struct device *, struct scatterlist *, int,
				   enum dma_data_direction);

static inline void dma_sync_single_range_for_device(struct device *dev,
		dma_addr_t dma_handle, unsigned long offset, size_t size,
		enum dma_data_direction direction)
{
	/* just sync everything for now */
	dma_sync_single_for_device(dev, dma_handle, offset + size, direction);
}

static inline void dma_sync_single_for_cpu(struct device *dev, dma_addr_t handle,
					   size_t size, enum dma_data_direction dir)
{
}

static inline void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
				       int nents, enum dma_data_direction dir)
{
}

static inline void dma_sync_single_range_for_cpu(struct device *dev,
		dma_addr_t dma_handle, unsigned long offset, size_t size,
		enum dma_data_direction direction)
{
	/* just sync everything for now */
	dma_sync_single_for_cpu(dev, dma_handle, offset + size, direction);
}

static inline int dma_mapping_error(struct device *dev, dma_addr_t handle)
{
	return 0;
}

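/*
 * A minimal usage sketch (not part of the original header): how a driver
 * might stream a buffer to a device with the routines declared above.
 * `dev', `buf' and `len' are placeholders supplied by the caller.
 *
 *	dma_addr_t dma = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, dma))
 *		return -ENOMEM;
 *	... let the device DMA from `dma' ...
 *	dma_unmap_single(dev, dma, len, DMA_TO_DEVICE);
 */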
#else
#include <asm-generic/dma-mapping-broken.h>
#endif

#endif  /* _M68K_DMA_MAPPING_H */