/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (c) 2018-2019 Synopsys, Inc. and/or its affiliates.
 * Synopsys DesignWare eDMA core driver
 *
 * Author: Gustavo Pimentel <gustavo.pimentel@synopsys.com>
 */

#ifndef _DW_EDMA_CORE_H
#define _DW_EDMA_CORE_H

#include <linux/msi.h>
#include <linux/dma/edma.h>

#include "../virt-dma.h"

#define EDMA_LL_SZ					24

enum dw_edma_dir {
	EDMA_DIR_WRITE = 0,
	EDMA_DIR_READ
};

enum dw_edma_request {
	EDMA_REQ_NONE = 0,
	EDMA_REQ_STOP,
	EDMA_REQ_PAUSE
};

enum dw_edma_status {
	EDMA_ST_IDLE = 0,
	EDMA_ST_PAUSE,
	EDMA_ST_BUSY
};

enum dw_edma_xfer_type {
	EDMA_XFER_SCATTER_GATHER = 0,
	EDMA_XFER_CYCLIC,
	EDMA_XFER_INTERLEAVED
};

struct dw_edma_chan;
struct dw_edma_chunk;

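/*
 * Transfer bookkeeping: a dw_edma_desc is built out of one or more chunks,
 * each chunk owns one linked-list region handed to the controller, and every
 * burst within a chunk describes a single linked-list element (source
 * address, destination address and size).
 */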
struct dw_edma_burst {
	struct list_head		list;
	u64				sar;
	u64				dar;
	u32				sz;
};

struct dw_edma_chunk {
	struct list_head		list;
	struct dw_edma_chan		*chan;
	struct dw_edma_burst		*burst;

	u32				bursts_alloc;

	u8				cb;
	struct dw_edma_region		ll_region;	/* Linked list */
};

struct dw_edma_desc {
	struct virt_dma_desc		vd;
	struct dw_edma_chan		*chan;
	struct dw_edma_chunk		*chunk;

	u32				chunks_alloc;

	u32				alloc_sz;
	u32				xfer_sz;
};

struct dw_edma_chan {
	struct virt_dma_chan		vc;
	struct dw_edma			*dw;
	int				id;
	enum dw_edma_dir		dir;

	u32				ll_max;

	struct msi_msg			msi;

	enum dw_edma_request		request;
	enum dw_edma_status		status;
	u8				configured;

	struct dma_slave_config		config;
};

struct dw_edma_irq {
	struct msi_msg			msi;
	u32				wr_mask;
	u32				rd_mask;
	struct dw_edma			*dw;
};

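/*
 * Top-level controller state: one struct dw_edma per eDMA instance, tying
 * together the dmaengine device, the per-direction channel counts, the
 * interrupt bookkeeping and the register-level backend operations.
 */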
struct dw_edma {
	char				name[32];

	struct dma_device		dma;

	u16				wr_ch_cnt;
	u16				rd_ch_cnt;

	struct dw_edma_irq		*irq;
	int				nr_irqs;

	struct dw_edma_chan		*chan;

	raw_spinlock_t			lock;		/* Only for legacy */

	struct dw_edma_chip		*chip;

	const struct dw_edma_core_ops	*core;
};

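/*
 * dw_edma_handler_t is the per-channel callback type the interrupt handler
 * uses to report "done" and "abort" events. struct dw_edma_core_ops groups
 * the register-level operations supplied by the hardware backend; the common
 * code dispatches to them through the inline wrappers further below.
 */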
typedef void (*dw_edma_handler_t)(struct dw_edma_chan *);

struct dw_edma_core_ops {
	void (*off)(struct dw_edma *dw);
	u16 (*ch_count)(struct dw_edma *dw, enum dw_edma_dir dir);
	enum dma_status (*ch_status)(struct dw_edma_chan *chan);
	irqreturn_t (*handle_int)(struct dw_edma_irq *dw_irq, enum dw_edma_dir dir,
				  dw_edma_handler_t done, dw_edma_handler_t abort);
	void (*start)(struct dw_edma_chunk *chunk, bool first);
	void (*ch_config)(struct dw_edma_chan *chan);
	void (*debugfs_on)(struct dw_edma *dw);
};
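
/*
 * A minimal sketch of how a backend might wire these up; the names below
 * (xyz_*) are hypothetical placeholders, not functions from this driver:
 *
 *	static const struct dw_edma_core_ops xyz_core = {
 *		.off		= xyz_off,
 *		.ch_count	= xyz_ch_count,
 *		.ch_status	= xyz_ch_status,
 *		.handle_int	= xyz_handle_int,
 *		.start		= xyz_start,
 *		.ch_config	= xyz_ch_config,
 *		.debugfs_on	= xyz_debugfs_on,
 *	};
 *
 *	dw->core = &xyz_core;
 */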

struct dw_edma_sg {
	struct scatterlist		*sgl;
	unsigned int			len;
};

struct dw_edma_cyclic {
	dma_addr_t			paddr;
	size_t				len;
	size_t				cnt;
};

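/*
 * Per-request parameters: the union carries the scatter-gather, cyclic or
 * interleaved description of the transfer being prepared, as selected by
 * the dw_edma_xfer_type field.
 */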
struct dw_edma_transfer {
	struct dma_chan			*dchan;
	union dw_edma_xfer {
		struct dw_edma_sg		sg;
		struct dw_edma_cyclic		cyclic;
		struct dma_interleaved_template *il;
	} xfer;
	enum dma_transfer_direction	direction;
	unsigned long			flags;
	enum dw_edma_xfer_type		type;
};

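/*
 * container_of() helpers for converting the virt-dma and dmaengine channel
 * types back to the driver channel, followed by thin wrappers that dispatch
 * to the hardware backend through dw->core.
 */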
static inline
struct dw_edma_chan *vc2dw_edma_chan(struct virt_dma_chan *vc)
{
	return container_of(vc, struct dw_edma_chan, vc);
}

static inline
struct dw_edma_chan *dchan2dw_edma_chan(struct dma_chan *dchan)
{
	return vc2dw_edma_chan(to_virt_chan(dchan));
}

static inline
void dw_edma_core_off(struct dw_edma *dw)
{
	dw->core->off(dw);
}

static inline
u16 dw_edma_core_ch_count(struct dw_edma *dw, enum dw_edma_dir dir)
{
	return dw->core->ch_count(dw, dir);
}

static inline
enum dma_status dw_edma_core_ch_status(struct dw_edma_chan *chan)
{
	return chan->dw->core->ch_status(chan);
}

static inline irqreturn_t
dw_edma_core_handle_int(struct dw_edma_irq *dw_irq, enum dw_edma_dir dir,
			dw_edma_handler_t done, dw_edma_handler_t abort)
{
	return dw_irq->dw->core->handle_int(dw_irq, dir, done, abort);
}

static inline
void dw_edma_core_start(struct dw_edma *dw, struct dw_edma_chunk *chunk, bool first)
{
	dw->core->start(chunk, first);
}

static inline
void dw_edma_core_ch_config(struct dw_edma_chan *chan)
{
	chan->dw->core->ch_config(chan);
}

static inline
void dw_edma_core_debugfs_on(struct dw_edma *dw)
{
	dw->core->debugfs_on(dw);
}

#endif /* _DW_EDMA_CORE_H */