• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  *
4  * Copyright (C) STMicroelectronics SA 2017
5  * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
6  *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
7  *
8  * Driver for STM32 MDMA controller
9  *
10  * Inspired by stm32-dma.c and dma-jz4780.c
11  */
12 
13 #include <linux/clk.h>
14 #include <linux/delay.h>
15 #include <linux/dmaengine.h>
16 #include <linux/dma-mapping.h>
17 #include <linux/dmapool.h>
18 #include <linux/err.h>
19 #include <linux/init.h>
20 #include <linux/iopoll.h>
21 #include <linux/jiffies.h>
22 #include <linux/list.h>
23 #include <linux/log2.h>
24 #include <linux/module.h>
25 #include <linux/of.h>
26 #include <linux/of_device.h>
27 #include <linux/of_dma.h>
28 #include <linux/platform_device.h>
29 #include <linux/pm_runtime.h>
30 #include <linux/reset.h>
31 #include <linux/slab.h>
32 
33 #include "virt-dma.h"
34 
/*
 * MDMA generic bitfield helpers: SHIFT finds the LSB position of a mask,
 * SET places a value into the field described by the mask, GET extracts it.
 */
#define STM32_MDMA_SHIFT(n)	(ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)	(((n) << STM32_MDMA_SHIFT(mask)) & (mask))
#define STM32_MDMA_GET(n, mask)	(((n) & (mask)) >> STM32_MDMA_SHIFT(mask))
41 
/* MDMA global interrupt status register */
#define STM32_MDMA_GISR0		0x0000

/* MDMA channel x interrupt/status register, x = 0..62 */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x))
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)

/* MDMA channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)

/* MDMA channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_SINC_MASK)
/* CTCR bits that come straight from the DT-provided transfer config */
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CBNDTR_BNDT_MASK)

/* MDMA channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
#define STM32_MDMA_CTBR_TSEL(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

/* Driver-wide limits */
#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		32
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x11
202 
/* Trigger granularity, programmed into the CTCR TRGM field */
enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

/* Bus width encoding used by the CTCR SSIZE/DSIZE fields */
enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

/* Address increment mode (CTCR SINC/DINC); value 1 is deliberately skipped */
enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};
222 
223 struct stm32_mdma_chan_config {
224 	u32 request;
225 	u32 priority_level;
226 	u32 transfer_config;
227 	u32 mask_addr;
228 	u32 mask_data;
229 };
230 
231 struct stm32_mdma_hwdesc {
232 	u32 ctcr;
233 	u32 cbndtr;
234 	u32 csar;
235 	u32 cdar;
236 	u32 cbrur;
237 	u32 clar;
238 	u32 ctbr;
239 	u32 dummy;
240 	u32 cmar;
241 	u32 cmdr;
242 } __aligned(64);
243 
244 struct stm32_mdma_desc_node {
245 	struct stm32_mdma_hwdesc *hwdesc;
246 	dma_addr_t hwdesc_phys;
247 };
248 
249 struct stm32_mdma_desc {
250 	struct virt_dma_desc vdesc;
251 	u32 ccr;
252 	bool cyclic;
253 	u32 count;
254 	struct stm32_mdma_desc_node node[];
255 };
256 
257 struct stm32_mdma_chan {
258 	struct virt_dma_chan vchan;
259 	struct dma_pool *desc_pool;
260 	u32 id;
261 	struct stm32_mdma_desc *desc;
262 	u32 curr_hwdesc;
263 	struct dma_slave_config dma_config;
264 	struct stm32_mdma_chan_config chan_config;
265 	bool busy;
266 	u32 mem_burst;
267 	u32 mem_width;
268 };
269 
270 struct stm32_mdma_device {
271 	struct dma_device ddev;
272 	void __iomem *base;
273 	struct clk *clk;
274 	int irq;
275 	struct reset_control *rst;
276 	u32 nr_channels;
277 	u32 nr_requests;
278 	u32 nr_ahb_addr_masks;
279 	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
280 	u32 ahb_addr_masks[];
281 };
282 
stm32_mdma_get_dev(struct stm32_mdma_chan * chan)283 static struct stm32_mdma_device *stm32_mdma_get_dev(
284 	struct stm32_mdma_chan *chan)
285 {
286 	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
287 			    ddev);
288 }
289 
to_stm32_mdma_chan(struct dma_chan * c)290 static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
291 {
292 	return container_of(c, struct stm32_mdma_chan, vchan.chan);
293 }
294 
to_stm32_mdma_desc(struct virt_dma_desc * vdesc)295 static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
296 {
297 	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
298 }
299 
chan2dev(struct stm32_mdma_chan * chan)300 static struct device *chan2dev(struct stm32_mdma_chan *chan)
301 {
302 	return &chan->vchan.chan.dev->device;
303 }
304 
mdma2dev(struct stm32_mdma_device * mdma_dev)305 static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
306 {
307 	return mdma_dev->ddev.dev;
308 }
309 
/* Read a controller register (relaxed: no implied memory barrier) */
static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}
314 
/* Write a controller register (relaxed: no implied memory barrier) */
static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}
319 
/* Read-modify-write: set the bits of @mask in register @reg */
static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	u32 val = readl_relaxed(dmadev->base + reg);

	writel_relaxed(val | mask, dmadev->base + reg);
}
327 
/* Read-modify-write: clear the bits of @mask in register @reg */
static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	u32 val = readl_relaxed(dmadev->base + reg);

	writel_relaxed(val & ~mask, dmadev->base + reg);
}
335 
/*
 * Allocate a software descriptor with @count hardware descriptors from the
 * channel's DMA pool. Returns NULL on allocation failure (all partial
 * allocations are released).
 */
static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		struct stm32_mdma_desc_node *node = &desc->node[i];

		node->hwdesc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
					      &node->hwdesc_phys);
		if (!node->hwdesc)
			goto err;
	}

	desc->count = count;
	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	/* Unwind the hwdescs allocated so far, newest first */
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}
366 
stm32_mdma_desc_free(struct virt_dma_desc * vdesc)367 static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
368 {
369 	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
370 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
371 	int i;
372 
373 	for (i = 0; i < desc->count; i++)
374 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
375 			      desc->node[i].hwdesc_phys);
376 	kfree(desc);
377 }
378 
stm32_mdma_get_width(struct stm32_mdma_chan * chan,enum dma_slave_buswidth width)379 static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
380 				enum dma_slave_buswidth width)
381 {
382 	switch (width) {
383 	case DMA_SLAVE_BUSWIDTH_1_BYTE:
384 	case DMA_SLAVE_BUSWIDTH_2_BYTES:
385 	case DMA_SLAVE_BUSWIDTH_4_BYTES:
386 	case DMA_SLAVE_BUSWIDTH_8_BYTES:
387 		return ffs(width) - 1;
388 	default:
389 		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
390 			width);
391 		return -EINVAL;
392 	}
393 }
394 
/*
 * Pick the widest bus width (8 down to 1 byte) such that both the address
 * and the buffer length are aligned to it and one beat fits in the buffer
 * transfer length @tlen. Falls through to 1 byte if nothing wider fits.
 *
 * Fix: drop the redundant initializer on max_width — the for-loop assigns
 * it immediately, so the old initialization was a dead store.
 */
static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}
414 
/*
 * Largest burst (in beats of @width bytes) that divides both tlen and
 * buf_len and does not exceed max_burst; never returns less than 1 beat.
 */
static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 burst_bytes, best_burst;

	burst_bytes = min((u32)1 << __ffs(tlen | buf_len), max_burst * width);
	best_burst = burst_bytes / width;

	return best_burst ? best_burst : 1;
}
425 
stm32_mdma_disable_chan(struct stm32_mdma_chan * chan)426 static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
427 {
428 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
429 	u32 ccr, cisr, id, reg;
430 	int ret;
431 
432 	id = chan->id;
433 	reg = STM32_MDMA_CCR(id);
434 
435 	/* Disable interrupts */
436 	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
437 
438 	ccr = stm32_mdma_read(dmadev, reg);
439 	if (ccr & STM32_MDMA_CCR_EN) {
440 		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
441 
442 		/* Ensure that any ongoing transfer has been completed */
443 		ret = readl_relaxed_poll_timeout_atomic(
444 				dmadev->base + STM32_MDMA_CISR(id), cisr,
445 				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
446 		if (ret) {
447 			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
448 			return -EBUSY;
449 		}
450 	}
451 
452 	return 0;
453 }
454 
stm32_mdma_stop(struct stm32_mdma_chan * chan)455 static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
456 {
457 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
458 	u32 status;
459 	int ret;
460 
461 	/* Disable DMA */
462 	ret = stm32_mdma_disable_chan(chan);
463 	if (ret < 0)
464 		return;
465 
466 	/* Clear interrupt status if it is there */
467 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
468 	if (status) {
469 		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
470 			__func__, status);
471 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
472 	}
473 
474 	chan->busy = false;
475 }
476 
/*
 * Set @ctbr_mask in *ctbr when @src_addr lies in one of the AHB address
 * ranges (matched on the top nibble), i.e. select the AHB bus for it;
 * otherwise leave the bit cleared (AXI).
 */
static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 top_nibble = src_addr & 0xF0000000;
	int i;

	*ctbr &= ~ctbr_mask;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (top_nibble == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			return;
		}
	}
}
493 
/*
 * stm32_mdma_set_xfer_param - compute CCR/CTCR/CTBR values for a transfer
 * @chan:	MDMA channel
 * @direction:	DMA_MEM_TO_DEV or DMA_DEV_TO_MEM (others rejected)
 * @mdma_ccr:	out: channel control register value
 * @mdma_ctcr:	out: transfer configuration register value
 * @mdma_ctbr:	out: trigger/bus selection register value
 * @addr:	memory-side address (used to derive the memory bus width)
 * @buf_len:	transfer length in bytes
 *
 * Also writes the device-side address into CDAR (mem-to-dev) or CSAR
 * (dev-to-mem). Returns 0 on success or a negative errno.
 *
 * Fix: the MEM_TO_DEV branch cleared the source size/offset fields with
 * "ctcr &= ~SSIZE_MASK | SINCOS_MASK", which due to operator precedence
 * is "(~SSIZE_MASK) | SINCOS_MASK" and therefore never clears the SINCOS
 * field — stale DT-provided SINCOS bits could be OR-ed with the computed
 * value. Parenthesized to match the DEV_TO_MEM branch.
 */
static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieve from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		/*
		 * Parentheses required: without them only SSIZE was cleared
		 * and stale SINCOS bits survived (precedence bug).
		 */
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}
672 
stm32_mdma_dump_hwdesc(struct stm32_mdma_chan * chan,struct stm32_mdma_desc_node * node)673 static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
674 				   struct stm32_mdma_desc_node *node)
675 {
676 	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
677 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
678 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
679 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
680 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
681 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
682 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
683 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
684 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
685 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
686 }
687 
/*
 * Fill hardware descriptor @count of @desc for a @len-byte block between
 * @src_addr and @dst_addr, and chain it: the last node links back to the
 * first in cyclic mode, otherwise terminates the list (CLAR = 0).
 * @dir and @is_first are currently unused.
 */
static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc = desc->node[count].hwdesc;

	hwdesc->ctcr = ctcr;
	/*
	 * NOTE(review): hwdesc comes from dma_pool_alloc() without zeroing,
	 * so this read-modify-write of cbndtr operates on uninitialized pool
	 * memory (bit 17 is neither cleared nor set) — confirm whether a
	 * plain assignment is intended.
	 */
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			    STM32_MDMA_CBNDTR_BRDUM |
			    STM32_MDMA_CBNDTR_BRSUM |
			    STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (!is_last)
		hwdesc->clar = desc->node[count + 1].hwdesc_phys;
	else
		hwdesc->clar = is_cyclic ? desc->node[0].hwdesc_phys : 0;

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}
724 
/*
 * Build one hardware descriptor per scatterlist entry and compute the
 * channel CCR (error + channel-transfer-complete IRQs, plus block IRQ for
 * multi-entry lists). Returns 0 on success or a negative errno.
 *
 * Fix: reject sg_len == 0 — with an empty list the loop never runs and
 * ccr was read uninitialized below.
 */
static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	if (!sg_len)
		return -EINVAL;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}
778 
779 static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan * c,struct scatterlist * sgl,u32 sg_len,enum dma_transfer_direction direction,unsigned long flags,void * context)780 stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
781 			 u32 sg_len, enum dma_transfer_direction direction,
782 			 unsigned long flags, void *context)
783 {
784 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
785 	struct stm32_mdma_desc *desc;
786 	int i, ret;
787 
788 	/*
789 	 * Once DMA is in setup cyclic mode the channel we cannot assign this
790 	 * channel anymore. The DMA channel needs to be aborted or terminated
791 	 * for allowing another request.
792 	 */
793 	if (chan->desc && chan->desc->cyclic) {
794 		dev_err(chan2dev(chan),
795 			"Request not allowed when dma in cyclic mode\n");
796 		return NULL;
797 	}
798 
799 	desc = stm32_mdma_alloc_desc(chan, sg_len);
800 	if (!desc)
801 		return NULL;
802 
803 	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
804 	if (ret < 0)
805 		goto xfer_setup_err;
806 
807 	desc->cyclic = false;
808 
809 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
810 
811 xfer_setup_err:
812 	for (i = 0; i < desc->count; i++)
813 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
814 			      desc->node[i].hwdesc_phys);
815 	kfree(desc);
816 	return NULL;
817 }
818 
819 static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan * c,dma_addr_t buf_addr,size_t buf_len,size_t period_len,enum dma_transfer_direction direction,unsigned long flags)820 stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
821 			   size_t buf_len, size_t period_len,
822 			   enum dma_transfer_direction direction,
823 			   unsigned long flags)
824 {
825 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
826 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
827 	struct dma_slave_config *dma_config = &chan->dma_config;
828 	struct stm32_mdma_desc *desc;
829 	dma_addr_t src_addr, dst_addr;
830 	u32 ccr, ctcr, ctbr, count;
831 	int i, ret;
832 
833 	/*
834 	 * Once DMA is in setup cyclic mode the channel we cannot assign this
835 	 * channel anymore. The DMA channel needs to be aborted or terminated
836 	 * for allowing another request.
837 	 */
838 	if (chan->desc && chan->desc->cyclic) {
839 		dev_err(chan2dev(chan),
840 			"Request not allowed when dma in cyclic mode\n");
841 		return NULL;
842 	}
843 
844 	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
845 		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
846 		return NULL;
847 	}
848 
849 	if (buf_len % period_len) {
850 		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
851 		return NULL;
852 	}
853 
854 	count = buf_len / period_len;
855 
856 	desc = stm32_mdma_alloc_desc(chan, count);
857 	if (!desc)
858 		return NULL;
859 
860 	/* Select bus */
861 	if (direction == DMA_MEM_TO_DEV) {
862 		src_addr = buf_addr;
863 		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
864 						&ctbr, src_addr, period_len);
865 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
866 				   src_addr);
867 	} else {
868 		dst_addr = buf_addr;
869 		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
870 						&ctbr, dst_addr, period_len);
871 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
872 				   dst_addr);
873 	}
874 
875 	if (ret < 0)
876 		goto xfer_setup_err;
877 
878 	/* Enable interrupts */
879 	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
880 	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
881 	desc->ccr = ccr;
882 
883 	/* Configure hwdesc list */
884 	for (i = 0; i < count; i++) {
885 		if (direction == DMA_MEM_TO_DEV) {
886 			src_addr = buf_addr + i * period_len;
887 			dst_addr = dma_config->dst_addr;
888 		} else {
889 			src_addr = dma_config->src_addr;
890 			dst_addr = buf_addr + i * period_len;
891 		}
892 
893 		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
894 					dst_addr, period_len, ctcr, ctbr,
895 					i == count - 1, i == 0, true);
896 	}
897 
898 	desc->cyclic = true;
899 
900 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
901 
902 xfer_setup_err:
903 	for (i = 0; i < desc->count; i++)
904 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
905 			      desc->node[i].hwdesc_phys);
906 	kfree(desc);
907 	return NULL;
908 }
909 
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel has been set up for a cyclic transfer it cannot
	 * take another request: the channel must be aborted or terminated
	 * first.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	/* One hardware descriptor per maximum-sized block */
	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Start from the current channel configuration, channel disabled */
	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		/* Whole copy fits in one block: no linked list needed */
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		/* Hardware encodes the burst as a power of two */
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Enable packing/unpacking when src and dst widths differ */
		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;		/* no next descriptor */
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer: chain max-sized blocks */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/*
			 * Set source best burst size.
			 * NOTE(review): width/burst are derived from the full
			 * transfer length (len), not the per-chunk
			 * xfer_count — confirm this is intended.
			 */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			/* Enable packing when src and dst widths differ */
			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}
1088 
stm32_mdma_dump_reg(struct stm32_mdma_chan * chan)1089 static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1090 {
1091 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1092 
1093 	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
1094 		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1095 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
1096 		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1097 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
1098 		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1099 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
1100 		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1101 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
1102 		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1103 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
1104 		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1105 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
1106 		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1107 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
1108 		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1109 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
1110 		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1111 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
1112 		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1113 }
1114 
/*
 * Pop the next descriptor from the virtual channel queue and program it
 * into the hardware.  Called with the vchan lock held (see
 * stm32_mdma_issue_pending() and stm32_mdma_xfer_end()).
 */
static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		/* Nothing queued: channel goes idle */
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	/* Program the first hardware descriptor into the channel registers */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}
1166 
stm32_mdma_issue_pending(struct dma_chan * c)1167 static void stm32_mdma_issue_pending(struct dma_chan *c)
1168 {
1169 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1170 	unsigned long flags;
1171 
1172 	spin_lock_irqsave(&chan->vchan.lock, flags);
1173 
1174 	if (!vchan_issue_pending(&chan->vchan))
1175 		goto end;
1176 
1177 	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
1178 
1179 	if (!chan->desc && !chan->busy)
1180 		stm32_mdma_start_transfer(chan);
1181 
1182 end:
1183 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1184 }
1185 
stm32_mdma_pause(struct dma_chan * c)1186 static int stm32_mdma_pause(struct dma_chan *c)
1187 {
1188 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1189 	unsigned long flags;
1190 	int ret;
1191 
1192 	spin_lock_irqsave(&chan->vchan.lock, flags);
1193 	ret = stm32_mdma_disable_chan(chan);
1194 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1195 
1196 	if (!ret)
1197 		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);
1198 
1199 	return ret;
1200 }
1201 
/*
 * dmaengine callback: resume a previously paused channel by restoring
 * its CCR and re-setting the enable bit.  Fails with -EPERM when there
 * is no descriptor in flight or the channel is still enabled.
 */
static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	/* Transfer can be terminated */
	if (!chan->desc || (stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & STM32_MDMA_CCR_EN))
		return -EPERM;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}
1242 
/*
 * dmaengine callback: abort the in-flight transfer (if any) and free
 * every queued descriptor.  The in-flight descriptor is handed to
 * vchan_terminate_vdesc() so it is freed later by vchan_synchronize().
 */
static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		vchan_terminate_vdesc(&chan->desc->vdesc);
		/* Only touch the hardware if a transfer is actually running */
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	/* Free the collected descriptors outside the lock */
	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}
1263 
stm32_mdma_synchronize(struct dma_chan * c)1264 static void stm32_mdma_synchronize(struct dma_chan *c)
1265 {
1266 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1267 
1268 	vchan_synchronize(&chan->vchan);
1269 }
1270 
stm32_mdma_slave_config(struct dma_chan * c,struct dma_slave_config * config)1271 static int stm32_mdma_slave_config(struct dma_chan *c,
1272 				   struct dma_slave_config *config)
1273 {
1274 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1275 
1276 	memcpy(&chan->dma_config, config, sizeof(*config));
1277 
1278 	return 0;
1279 }
1280 
stm32_mdma_desc_residue(struct stm32_mdma_chan * chan,struct stm32_mdma_desc * desc,u32 curr_hwdesc)1281 static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1282 				      struct stm32_mdma_desc *desc,
1283 				      u32 curr_hwdesc)
1284 {
1285 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1286 	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
1287 	u32 cbndtr, residue, modulo, burst_size;
1288 	int i;
1289 
1290 	residue = 0;
1291 	for (i = curr_hwdesc + 1; i < desc->count; i++) {
1292 		hwdesc = desc->node[i].hwdesc;
1293 		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1294 	}
1295 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1296 	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1297 
1298 	if (!chan->mem_burst)
1299 		return residue;
1300 
1301 	burst_size = chan->mem_burst * chan->mem_width;
1302 	modulo = residue % burst_size;
1303 	if (modulo)
1304 		residue = residue - modulo + burst_size;
1305 
1306 	return residue;
1307 }
1308 
/*
 * dmaengine callback: report the status of a cookie and, when the
 * transfer is not complete, its residue in bytes.
 */
static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long irqflags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if (status == DMA_COMPLETE || !state)
		return status;

	spin_lock_irqsave(&chan->vchan.lock, irqflags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie) {
		/* Cookie matches the descriptor currently on the hardware */
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	} else if (vdesc) {
		/* Descriptor is still queued: nothing transferred yet */
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	}
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, irqflags);

	return status;
}
1338 
stm32_mdma_xfer_end(struct stm32_mdma_chan * chan)1339 static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1340 {
1341 	vchan_cookie_complete(&chan->desc->vdesc);
1342 	chan->desc = NULL;
1343 	chan->busy = false;
1344 
1345 	/* Start the next transfer if this driver has a next desc */
1346 	stm32_mdma_start_transfer(chan);
1347 }
1348 
stm32_mdma_irq_handler(int irq,void * devid)1349 static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1350 {
1351 	struct stm32_mdma_device *dmadev = devid;
1352 	struct stm32_mdma_chan *chan = devid;
1353 	u32 reg, id, ien, status, flag;
1354 
1355 	/* Find out which channel generates the interrupt */
1356 	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1357 	if (!status) {
1358 		dev_dbg(mdma2dev(dmadev), "spurious it\n");
1359 		return IRQ_NONE;
1360 	}
1361 	id = __ffs(status);
1362 
1363 	chan = &dmadev->chan[id];
1364 	if (!chan) {
1365 		dev_dbg(mdma2dev(dmadev), "MDMA channel not initialized\n");
1366 		goto exit;
1367 	}
1368 
1369 	/* Handle interrupt for the channel */
1370 	spin_lock(&chan->vchan.lock);
1371 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1372 	ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
1373 	ien &= STM32_MDMA_CCR_IRQ_MASK;
1374 	ien >>= 1;
1375 
1376 	if (!(status & ien)) {
1377 		spin_unlock(&chan->vchan.lock);
1378 		dev_dbg(chan2dev(chan),
1379 			"spurious it (status=0x%04x, ien=0x%04x)\n",
1380 			status, ien);
1381 		return IRQ_NONE;
1382 	}
1383 
1384 	flag = __ffs(status & ien);
1385 	reg = STM32_MDMA_CIFCR(chan->id);
1386 
1387 	switch (1 << flag) {
1388 	case STM32_MDMA_CISR_TEIF:
1389 		id = chan->id;
1390 		status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
1391 		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
1392 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1393 		break;
1394 
1395 	case STM32_MDMA_CISR_CTCIF:
1396 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1397 		stm32_mdma_xfer_end(chan);
1398 		break;
1399 
1400 	case STM32_MDMA_CISR_BRTIF:
1401 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1402 		break;
1403 
1404 	case STM32_MDMA_CISR_BTIF:
1405 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1406 		chan->curr_hwdesc++;
1407 		if (chan->desc && chan->desc->cyclic) {
1408 			if (chan->curr_hwdesc == chan->desc->count)
1409 				chan->curr_hwdesc = 0;
1410 			vchan_cyclic_callback(&chan->desc->vdesc);
1411 		}
1412 		break;
1413 
1414 	case STM32_MDMA_CISR_TCIF:
1415 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1416 		break;
1417 
1418 	default:
1419 		dev_err(chan2dev(chan), "it %d unhandled (status=0x%04x)\n",
1420 			1 << flag, status);
1421 	}
1422 
1423 	spin_unlock(&chan->vchan.lock);
1424 
1425 exit:
1426 	return IRQ_HANDLED;
1427 }
1428 
stm32_mdma_alloc_chan_resources(struct dma_chan * c)1429 static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1430 {
1431 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1432 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1433 	int ret;
1434 
1435 	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1436 					   c->device->dev,
1437 					   sizeof(struct stm32_mdma_hwdesc),
1438 					  __alignof__(struct stm32_mdma_hwdesc),
1439 					   0);
1440 	if (!chan->desc_pool) {
1441 		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1442 		return -ENOMEM;
1443 	}
1444 
1445 	ret = pm_runtime_get_sync(dmadev->ddev.dev);
1446 	if (ret < 0)
1447 		return ret;
1448 
1449 	ret = stm32_mdma_disable_chan(chan);
1450 	if (ret < 0)
1451 		pm_runtime_put(dmadev->ddev.dev);
1452 
1453 	return ret;
1454 }
1455 
stm32_mdma_free_chan_resources(struct dma_chan * c)1456 static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1457 {
1458 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1459 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1460 	unsigned long flags;
1461 
1462 	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1463 
1464 	if (chan->busy) {
1465 		spin_lock_irqsave(&chan->vchan.lock, flags);
1466 		stm32_mdma_stop(chan);
1467 		chan->desc = NULL;
1468 		spin_unlock_irqrestore(&chan->vchan.lock, flags);
1469 	}
1470 
1471 	pm_runtime_put(dmadev->ddev.dev);
1472 	vchan_free_chan_resources(to_virt_chan(c));
1473 	dmam_pool_destroy(chan->desc_pool);
1474 	chan->desc_pool = NULL;
1475 }
1476 
stm32_mdma_of_xlate(struct of_phandle_args * dma_spec,struct of_dma * ofdma)1477 static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1478 					    struct of_dma *ofdma)
1479 {
1480 	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1481 	struct stm32_mdma_chan *chan;
1482 	struct dma_chan *c;
1483 	struct stm32_mdma_chan_config config;
1484 
1485 	if (dma_spec->args_count < 5) {
1486 		dev_err(mdma2dev(dmadev), "Bad number of args\n");
1487 		return NULL;
1488 	}
1489 
1490 	config.request = dma_spec->args[0];
1491 	config.priority_level = dma_spec->args[1];
1492 	config.transfer_config = dma_spec->args[2];
1493 	config.mask_addr = dma_spec->args[3];
1494 	config.mask_data = dma_spec->args[4];
1495 
1496 	if (config.request >= dmadev->nr_requests) {
1497 		dev_err(mdma2dev(dmadev), "Bad request line\n");
1498 		return NULL;
1499 	}
1500 
1501 	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1502 		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1503 		return NULL;
1504 	}
1505 
1506 	c = dma_get_any_slave_channel(&dmadev->ddev);
1507 	if (!c) {
1508 		dev_err(mdma2dev(dmadev), "No more channels available\n");
1509 		return NULL;
1510 	}
1511 
1512 	chan = to_stm32_mdma_chan(c);
1513 	chan->chan_config = config;
1514 
1515 	return c;
1516 }
1517 
/* Device-tree match table: devices handled by this driver */
static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1523 
stm32_mdma_probe(struct platform_device * pdev)1524 static int stm32_mdma_probe(struct platform_device *pdev)
1525 {
1526 	struct stm32_mdma_chan *chan;
1527 	struct stm32_mdma_device *dmadev;
1528 	struct dma_device *dd;
1529 	struct device_node *of_node;
1530 	struct resource *res;
1531 	u32 nr_channels, nr_requests;
1532 	int i, count, ret;
1533 
1534 	of_node = pdev->dev.of_node;
1535 	if (!of_node)
1536 		return -ENODEV;
1537 
1538 	ret = device_property_read_u32(&pdev->dev, "dma-channels",
1539 				       &nr_channels);
1540 	if (ret) {
1541 		nr_channels = STM32_MDMA_MAX_CHANNELS;
1542 		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
1543 			 nr_channels);
1544 	}
1545 
1546 	ret = device_property_read_u32(&pdev->dev, "dma-requests",
1547 				       &nr_requests);
1548 	if (ret) {
1549 		nr_requests = STM32_MDMA_MAX_REQUESTS;
1550 		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
1551 			 nr_requests);
1552 	}
1553 
1554 	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1555 	if (count < 0)
1556 		count = 0;
1557 
1558 	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
1559 			      GFP_KERNEL);
1560 	if (!dmadev)
1561 		return -ENOMEM;
1562 
1563 	dmadev->nr_channels = nr_channels;
1564 	dmadev->nr_requests = nr_requests;
1565 	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1566 				       dmadev->ahb_addr_masks,
1567 				       count);
1568 	dmadev->nr_ahb_addr_masks = count;
1569 
1570 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1571 	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1572 	if (IS_ERR(dmadev->base))
1573 		return PTR_ERR(dmadev->base);
1574 
1575 	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1576 	if (IS_ERR(dmadev->clk)) {
1577 		ret = PTR_ERR(dmadev->clk);
1578 		if (ret == -EPROBE_DEFER)
1579 			dev_info(&pdev->dev, "Missing controller clock\n");
1580 		return ret;
1581 	}
1582 
1583 	ret = clk_prepare_enable(dmadev->clk);
1584 	if (ret < 0) {
1585 		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
1586 		return ret;
1587 	}
1588 
1589 	dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
1590 	if (!IS_ERR(dmadev->rst)) {
1591 		reset_control_assert(dmadev->rst);
1592 		udelay(2);
1593 		reset_control_deassert(dmadev->rst);
1594 	}
1595 
1596 	dd = &dmadev->ddev;
1597 	dma_cap_set(DMA_SLAVE, dd->cap_mask);
1598 	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1599 	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1600 	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1601 	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1602 	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1603 	dd->device_tx_status = stm32_mdma_tx_status;
1604 	dd->device_issue_pending = stm32_mdma_issue_pending;
1605 	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1606 	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1607 	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1608 	dd->device_config = stm32_mdma_slave_config;
1609 	dd->device_pause = stm32_mdma_pause;
1610 	dd->device_resume = stm32_mdma_resume;
1611 	dd->device_terminate_all = stm32_mdma_terminate_all;
1612 	dd->device_synchronize = stm32_mdma_synchronize;
1613 	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1614 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1615 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1616 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1617 	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1618 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1619 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1620 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1621 	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1622 		BIT(DMA_MEM_TO_MEM);
1623 	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1624 	dd->max_burst = STM32_MDMA_MAX_BURST;
1625 	dd->dev = &pdev->dev;
1626 	INIT_LIST_HEAD(&dd->channels);
1627 
1628 	for (i = 0; i < dmadev->nr_channels; i++) {
1629 		chan = &dmadev->chan[i];
1630 		chan->id = i;
1631 		chan->vchan.desc_free = stm32_mdma_desc_free;
1632 		vchan_init(&chan->vchan, dd);
1633 	}
1634 
1635 	dmadev->irq = platform_get_irq(pdev, 0);
1636 	if (dmadev->irq < 0)
1637 		return dmadev->irq;
1638 
1639 	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1640 			       0, dev_name(&pdev->dev), dmadev);
1641 	if (ret) {
1642 		dev_err(&pdev->dev, "failed to request IRQ\n");
1643 		return ret;
1644 	}
1645 
1646 	ret = dmaenginem_async_device_register(dd);
1647 	if (ret)
1648 		return ret;
1649 
1650 	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1651 	if (ret < 0) {
1652 		dev_err(&pdev->dev,
1653 			"STM32 MDMA DMA OF registration failed %d\n", ret);
1654 		goto err_unregister;
1655 	}
1656 
1657 	platform_set_drvdata(pdev, dmadev);
1658 	pm_runtime_set_active(&pdev->dev);
1659 	pm_runtime_enable(&pdev->dev);
1660 	pm_runtime_get_noresume(&pdev->dev);
1661 	pm_runtime_put(&pdev->dev);
1662 
1663 	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1664 
1665 	return 0;
1666 
1667 err_unregister:
1668 	return ret;
1669 }
1670 
1671 #ifdef CONFIG_PM
/* Runtime PM suspend: gate the controller clock while idle. */
static int stm32_mdma_runtime_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}
1680 
stm32_mdma_runtime_resume(struct device * dev)1681 static int stm32_mdma_runtime_resume(struct device *dev)
1682 {
1683 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1684 	int ret;
1685 
1686 	ret = clk_prepare_enable(dmadev->clk);
1687 	if (ret) {
1688 		dev_err(dev, "failed to prepare_enable clock\n");
1689 		return ret;
1690 	}
1691 
1692 	return 0;
1693 }
1694 #endif
1695 
/* Runtime PM only: the clock is gated while the controller is unused */
static const struct dev_pm_ops stm32_mdma_pm_ops = {
	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
			   stm32_mdma_runtime_resume, NULL)
};
1700 
/* Platform driver glue; no .remove — resources are devm/dmaenginem managed */
static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
		.pm = &stm32_mdma_pm_ops,
	},
};
1709 
static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

/* subsys_initcall: register before client drivers request channels */
subsys_initcall(stm32_mdma_init);
1716 
1717 MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1718 MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1719 MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1720 MODULE_LICENSE("GPL v2");
1721