1 // SPDX-License-Identifier: GPL-2.0
2 //
3 // Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
4 //
5 // Copyright (C) 2021 Renesas Electronics Corp.
6 // Copyright (C) 2019 Chris Brandt.
7 //
8 
9 #include <linux/clk.h>
10 #include <linux/dmaengine.h>
11 #include <linux/io.h>
12 #include <linux/module.h>
13 #include <linux/of_device.h>
14 #include <linux/pm_runtime.h>
15 #include <linux/reset.h>
16 #include <sound/soc.h>
17 
18 /* REGISTER OFFSET */
19 #define SSICR			0x000
20 #define SSISR			0x004
21 #define SSIFCR			0x010
22 #define SSIFSR			0x014
23 #define SSIFTDR			0x018
24 #define SSIFRDR			0x01c
25 #define SSIOFR			0x020
26 #define SSISCR			0x024
27 
28 /* SSI REGISTER BITS */
29 #define SSICR_DWL(x)		(((x) & 0x7) << 19)
30 #define SSICR_SWL(x)		(((x) & 0x7) << 16)
33 
34 #define SSICR_CKS		BIT(30)
35 #define SSICR_TUIEN		BIT(29)
36 #define SSICR_TOIEN		BIT(28)
37 #define SSICR_RUIEN		BIT(27)
38 #define SSICR_ROIEN		BIT(26)
39 #define SSICR_MST		BIT(14)
40 #define SSICR_BCKP		BIT(13)
41 #define SSICR_LRCKP		BIT(12)
42 #define SSICR_CKDV(x)		(((x) & 0xf) << 4)
43 #define SSICR_TEN		BIT(1)
44 #define SSICR_REN		BIT(0)
45 
46 #define SSISR_TUIRQ		BIT(29)
47 #define SSISR_TOIRQ		BIT(28)
48 #define SSISR_RUIRQ		BIT(27)
49 #define SSISR_ROIRQ		BIT(26)
50 #define SSISR_IIRQ		BIT(25)
51 
52 #define SSIFCR_AUCKE		BIT(31)
53 #define SSIFCR_SSIRST		BIT(16)
54 #define SSIFCR_TIE		BIT(3)
55 #define SSIFCR_RIE		BIT(2)
56 #define SSIFCR_TFRST		BIT(1)
57 #define SSIFCR_RFRST		BIT(0)
58 
59 #define SSIFSR_TDC_MASK		0x3f
60 #define SSIFSR_TDC_SHIFT	24
61 #define SSIFSR_RDC_MASK		0x3f
62 #define SSIFSR_RDC_SHIFT	8
63 
64 #define SSIFSR_TDC(x)		(((x) & 0x1f) << 24)
65 #define SSIFSR_TDE		BIT(16)
66 #define SSIFSR_RDC(x)		(((x) & 0x1f) << 8)
67 #define SSIFSR_RDF		BIT(0)
68 
69 #define SSIOFR_LRCONT		BIT(8)
70 
71 #define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
72 #define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)
73 
74 /* Pre-allocated buffer sizes */
75 #define PREALLOC_BUFFER		(SZ_32K)
76 #define PREALLOC_BUFFER_MAX	(SZ_32K)
77 
78 #define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8 kHz to 48 kHz */
79 #define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
80 #define SSI_CHAN_MIN		2
81 #define SSI_CHAN_MAX		2
82 #define SSI_FIFO_DEPTH		32
83 
84 struct rz_ssi_priv;
85 
86 struct rz_ssi_stream {
87 	struct rz_ssi_priv *priv;
88 	struct snd_pcm_substream *substream;
89 	int fifo_sample_size;	/* sample capacity of SSI FIFO */
90 	int dma_buffer_pos;	/* frame offset of the next DMA descriptor within the buffer */
91 	int period_counter;	/* for keeping track of periods transferred */
92 	int sample_width;
93 	int buffer_pos;		/* current frame position in the buffer */
94 	int running;		/* 0=stopped, 1=running */
95 
96 	int uerr_num;
97 	int oerr_num;
98 
99 	struct dma_chan *dma_ch;
100 
101 	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
102 };
103 
104 struct rz_ssi_priv {
105 	void __iomem *base;
106 	struct platform_device *pdev;
107 	struct reset_control *rstc;
108 	struct device *dev;
109 	struct clk *sfr_clk;
110 	struct clk *clk;
111 
112 	phys_addr_t phys;
113 	int irq_int;
114 	int irq_tx;
115 	int irq_rx;
116 
117 	spinlock_t lock;
118 
119 	/*
120 	 * The SSI supports full-duplex transmission and reception.
121 	 * However, if an error occurs, channel reset (both transmission
122 	 * and reception reset) is required.
123 	 * So it is better to use it in half-duplex mode (playback and
124 	 * capture should be done on separate SSI channels).
125 	 */
126 	struct rz_ssi_stream playback;
127 	struct rz_ssi_stream capture;
128 
129 	/* clocks: audio_mck is whichever of audio_clk_1/audio_clk_2 (AUDIO_CLK1/AUDIO_CLK2 rates) is used to derive the bit clock */
130 	unsigned long audio_mck;
131 	unsigned long audio_clk_1;
132 	unsigned long audio_clk_2;
133 
134 	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
135 	bool bckp_rise;	/* Bit clock polarity (SSICR.BCKP) */
136 	bool dma_rt;
137 };
138 
139 static void rz_ssi_dma_complete(void *data);
140 
141 static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
142 {
143 	writel(data, (priv->base + reg));
144 }
145 
146 static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
147 {
148 	return readl(priv->base + reg);
149 }
150 
151 static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
152 				 u32 bclr, u32 bset)
153 {
154 	u32 val;
155 
156 	val = readl(priv->base + reg);
157 	val = (val & ~bclr) | bset;
158 	writel(val, (priv->base + reg));
159 }
160 
161 static inline struct snd_soc_dai *
162 rz_ssi_get_dai(struct snd_pcm_substream *substream)
163 {
164 	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);
165 
166 	return asoc_rtd_to_cpu(rtd, 0);
167 }
168 
169 static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi,
170 					 struct snd_pcm_substream *substream)
171 {
172 	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
173 }
174 
175 static inline struct rz_ssi_stream *
176 rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
177 {
178 	struct rz_ssi_stream *stream = &ssi->playback;
179 
180 	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
181 		stream = &ssi->capture;
182 
183 	return stream;
184 }
185 
186 static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
187 {
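	/*
	 * DMA is usable only when a TX channel was obtained and either the
	 * single shared "rt" channel serves both directions or a dedicated
	 * RX channel also exists.
	 */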
188 	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
189 }
190 
191 static int rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
192 				  struct rz_ssi_stream *strm)
193 {
194 	unsigned long flags;
195 	int ret;
196 
197 	spin_lock_irqsave(&ssi->lock, flags);
198 	ret = !!(strm->substream && strm->substream->runtime);
199 	spin_unlock_irqrestore(&ssi->lock, flags);
200 
201 	return ret;
202 }
203 
204 static int rz_ssi_stream_init(struct rz_ssi_priv *ssi,
205 			      struct rz_ssi_stream *strm,
206 			      struct snd_pcm_substream *substream)
207 {
208 	struct snd_pcm_runtime *runtime = substream->runtime;
209 
210 	strm->substream = substream;
211 	strm->sample_width = samples_to_bytes(runtime, 1);
212 	strm->dma_buffer_pos = 0;
213 	strm->period_counter = 0;
214 	strm->buffer_pos = 0;
215 
216 	strm->oerr_num = 0;
217 	strm->uerr_num = 0;
218 	strm->running = 0;
219 
220 	/* fifo init */
221 	strm->fifo_sample_size = SSI_FIFO_DEPTH;
222 
223 	return 0;
224 }
225 
226 static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
227 			       struct rz_ssi_stream *strm)
228 {
229 	struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream);
230 	unsigned long flags;
231 
232 	spin_lock_irqsave(&ssi->lock, flags);
233 	strm->substream = NULL;
234 	spin_unlock_irqrestore(&ssi->lock, flags);
235 
236 	if (strm->oerr_num > 0)
237 		dev_info(dai->dev, "overrun = %d\n", strm->oerr_num);
238 
239 	if (strm->uerr_num > 0)
240 		dev_info(dai->dev, "underrun = %d\n", strm->uerr_num);
241 }
242 
243 static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
244 			    unsigned int channels)
245 {
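	/*
	 * ckdv[] maps each SSICR.CKDV field encoding (the array index) to the
	 * divider applied to the selected audio clock to produce the bit
	 * clock; -1 marks encodings the driver does not use.
	 */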
246 	static s8 ckdv[16] = { 1,  2,  4,  8, 16, 32, 64, 128,
247 			       6, 12, 24, 48, 96, -1, -1, -1 };
248 	unsigned int channel_bits = 32;	/* System Word Length */
249 	unsigned long bclk_rate = rate * channels * channel_bits;
250 	unsigned int div;
251 	unsigned int i;
252 	u32 ssicr = 0;
253 	u32 clk_ckdv;
254 
255 	/* Clear AUCKE so we can set MST */
256 	rz_ssi_reg_writel(ssi, SSIFCR, 0);
257 
258 	/* Continue to output LRCK pin even when idle */
259 	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
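	/*
	 * When both external audio clocks are available, prefer AUDIO_CLK1
	 * if its rate is an exact multiple of the required bit clock,
	 * otherwise fall back to AUDIO_CLK2.
	 */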
260 	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
261 		if (ssi->audio_clk_1 % bclk_rate)
262 			ssi->audio_mck = ssi->audio_clk_2;
263 		else
264 			ssi->audio_mck = ssi->audio_clk_1;
265 	}
266 
267 	/* Clock setting */
268 	ssicr |= SSICR_MST;
269 	if (ssi->audio_mck == ssi->audio_clk_1)
270 		ssicr |= SSICR_CKS;
271 	if (ssi->bckp_rise)
272 		ssicr |= SSICR_BCKP;
273 	if (ssi->lrckp_fsync_fall)
274 		ssicr |= SSICR_LRCKP;
275 
276 	/* Determine the clock divider */
277 	clk_ckdv = 0;
278 	div = ssi->audio_mck / bclk_rate;
279 	/* try to find a match */
280 	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
281 		if (ckdv[i] == div) {
282 			clk_ckdv = i;
283 			break;
284 		}
285 	}
286 
287 	if (i == ARRAY_SIZE(ckdv)) {
288 		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
289 		return -EINVAL;
290 	}
291 
292 	/*
293 	 * DWL: Data Word Length = 16 bits
294 	 * SWL: System Word Length = 32 bits
295 	 */
296 	ssicr |= SSICR_CKDV(clk_ckdv);
297 	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
298 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
299 	rz_ssi_reg_writel(ssi, SSIFCR,
300 			  (SSIFCR_AUCKE | SSIFCR_TFRST | SSIFCR_RFRST));
301 
302 	return 0;
303 }
304 
305 static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
306 {
307 	bool is_play = rz_ssi_stream_is_play(ssi, strm->substream);
308 	u32 ssicr, ssifcr;
309 
310 	ssicr = rz_ssi_reg_readl(ssi, SSICR);
311 	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR) & ~0xF;
312 
313 	/* FIFO interrupt thresholds */
314 	if (rz_ssi_is_dma_enabled(ssi))
315 		rz_ssi_reg_writel(ssi, SSISCR, 0);
316 	else
317 		rz_ssi_reg_writel(ssi, SSISCR,
318 				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
319 				  SSISCR_RDFS(0));
320 
321 	/* Enable error and FIFO interrupts for the active direction; hold the unused FIFO in reset */
322 	if (is_play) {
323 		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
324 		ssifcr |= SSIFCR_TIE | SSIFCR_RFRST;
325 	} else {
326 		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
327 		ssifcr |= SSIFCR_RIE | SSIFCR_TFRST;
328 	}
329 
330 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
331 	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);
332 
333 	/* Clear all error flags */
334 	rz_ssi_reg_mask_setl(ssi, SSISR,
335 			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
336 			      SSISR_RUIRQ), 0);
337 
338 	strm->running = 1;
339 	ssicr |= is_play ? SSICR_TEN : SSICR_REN;
340 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
341 
342 	return 0;
343 }
344 
345 static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
346 {
347 	int timeout;
348 
349 	strm->running = 0;
350 
351 	/* Disable TX/RX */
352 	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);
353 
354 	/* Cancel all remaining DMA transactions */
355 	if (rz_ssi_is_dma_enabled(ssi))
356 		dmaengine_terminate_async(strm->dma_ch);
357 
358 	/* Disable irqs */
359 	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
360 			     SSICR_RUIEN | SSICR_ROIEN, 0);
361 	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);
362 
363 	/* Clear all error flags */
364 	rz_ssi_reg_mask_setl(ssi, SSISR,
365 			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
366 			      SSISR_RUIRQ), 0);
367 
368 	/* Wait for idle */
369 	timeout = 100;
370 	while (--timeout) {
371 		if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ)
372 			break;
373 		udelay(1);
374 	}
375 
376 	if (!timeout)
377 		dev_info(ssi->dev, "timeout waiting for SSI idle\n");
378 
379 	/* Hold FIFOs in reset */
380 	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0,
381 			     SSIFCR_TFRST | SSIFCR_RFRST);
382 
383 	return 0;
384 }
385 
386 static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
387 {
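	/*
	 * buffer_pos advances in frames and wraps at buffer_size;
	 * snd_pcm_period_elapsed() is signalled once per period boundary crossed.
	 */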
388 	struct snd_pcm_substream *substream = strm->substream;
389 	struct snd_pcm_runtime *runtime;
390 	int current_period;
391 
392 	if (!strm->running || !substream || !substream->runtime)
393 		return;
394 
395 	runtime = substream->runtime;
396 	strm->buffer_pos += frames;
397 	WARN_ON(strm->buffer_pos > runtime->buffer_size);
398 
399 	/* ring buffer */
400 	if (strm->buffer_pos == runtime->buffer_size)
401 		strm->buffer_pos = 0;
402 
403 	current_period = strm->buffer_pos / runtime->period_size;
404 	if (strm->period_counter != current_period) {
405 		snd_pcm_period_elapsed(strm->substream);
406 		strm->period_counter = current_period;
407 	}
408 }
409 
410 static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
411 {
412 	struct snd_pcm_substream *substream = strm->substream;
413 	struct snd_pcm_runtime *runtime;
414 	bool done = false;
415 	u16 *buf;
416 	int fifo_samples;
417 	int frames_left;
418 	int samples;
419 	int i;
420 
421 	if (!rz_ssi_stream_is_valid(ssi, strm))
422 		return -EINVAL;
423 
424 	runtime = substream->runtime;
425 
426 	while (!done) {
427 		/* frames left in this period */
428 		frames_left = runtime->period_size -
429 			      (strm->buffer_pos % runtime->period_size);
430 		if (!frames_left)
431 			frames_left = runtime->period_size;
432 
433 		/* Samples in RX FIFO */
434 		fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
435 				SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;
436 
437 		/* Only read full frames at a time */
438 		samples = 0;
439 		while (frames_left && (fifo_samples >= runtime->channels)) {
440 			samples += runtime->channels;
441 			fifo_samples -= runtime->channels;
442 			frames_left--;
443 		}
444 
445 		/* not enough samples yet */
446 		if (!samples)
447 			break;
448 
449 		/* calculate new buffer index */
450 		buf = (u16 *)(runtime->dma_area);
451 		buf += strm->buffer_pos * runtime->channels;
452 
453 		/* Note, only 16-bit samples are supported; data is MSB-aligned in the 32-bit FIFO word, hence the shift */
454 		for (i = 0; i < samples; i++)
455 			*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);
456 
457 		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
458 		rz_ssi_pointer_update(strm, samples / runtime->channels);
459 
460 		/* loop again only if a full period was consumed and the RX FIFO still holds at least one frame */
461 		if (!(!frames_left && fifo_samples >= runtime->channels))
462 			done = true;
463 	}
464 
465 	return 0;
466 }
467 
468 static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
469 {
470 	struct snd_pcm_substream *substream = strm->substream;
471 	struct snd_pcm_runtime *runtime = substream->runtime;
472 	int sample_space;
473 	int samples = 0;
474 	int frames_left;
475 	int i;
476 	u32 ssifsr;
477 	u16 *buf;
478 
479 	if (!rz_ssi_stream_is_valid(ssi, strm))
480 		return -EINVAL;
481 
482 	/* frames left in this period */
483 	frames_left = runtime->period_size - (strm->buffer_pos %
484 					      runtime->period_size);
485 	if (frames_left == 0)
486 		frames_left = runtime->period_size;
487 
488 	sample_space = strm->fifo_sample_size;
489 	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
490 	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;
491 
492 	/* Only add full frames at a time */
493 	while (frames_left && (sample_space >= runtime->channels)) {
494 		samples += runtime->channels;
495 		sample_space -= runtime->channels;
496 		frames_left--;
497 	}
498 
499 	/* no space to send anything right now */
500 	if (samples == 0)
501 		return 0;
502 
503 	/* calculate new buffer index */
504 	buf = (u16 *)(runtime->dma_area);
505 	buf += strm->buffer_pos * runtime->channels;
506 
507 	/* Note, only 16-bit samples are supported; data is written MSB-aligned into the 32-bit FIFO word */
508 	for (i = 0; i < samples; i++)
509 		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));
510 
511 	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
512 	rz_ssi_pointer_update(strm, samples / runtime->channels);
513 
514 	return 0;
515 }
516 
517 static irqreturn_t rz_ssi_interrupt(int irq, void *data)
518 {
519 	struct rz_ssi_stream *strm = NULL;
520 	struct rz_ssi_priv *ssi = data;
521 	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);
522 
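	/*
	 * Attribute the interrupt to whichever stream is currently open
	 * (playback first); the error/idle IRQ line is shared by both directions.
	 */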
523 	if (ssi->playback.substream)
524 		strm = &ssi->playback;
525 	else if (ssi->capture.substream)
526 		strm = &ssi->capture;
527 	else
528 		return IRQ_HANDLED; /* Left over TX/RX interrupt */
529 
530 	if (irq == ssi->irq_int) { /* error or idle */
531 		if (ssisr & SSISR_TUIRQ)
532 			strm->uerr_num++;
533 		if (ssisr & SSISR_TOIRQ)
534 			strm->oerr_num++;
535 		if (ssisr & SSISR_RUIRQ)
536 			strm->uerr_num++;
537 		if (ssisr & SSISR_ROIRQ)
538 			strm->oerr_num++;
539 
540 		if (ssisr & (SSISR_TUIRQ | SSISR_TOIRQ | SSISR_RUIRQ |
541 			     SSISR_ROIRQ)) {
542 			/* Error handling */
543 			/* You must reset (stop/restart) after each interrupt */
544 			rz_ssi_stop(ssi, strm);
545 
546 			/* Clear all flags */
547 			rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ |
548 					     SSISR_TUIRQ | SSISR_ROIRQ |
549 					     SSISR_RUIRQ, 0);
550 
551 			/* Add/remove more data */
552 			strm->transfer(ssi, strm);
553 
554 			/* Resume */
555 			rz_ssi_start(ssi, strm);
556 		}
557 	}
558 
559 	if (!strm->running)
560 		return IRQ_HANDLED;
561 
562 	/* tx data empty */
563 	if (irq == ssi->irq_tx)
564 		strm->transfer(ssi, &ssi->playback);
565 
566 	/* rx data full */
567 	if (irq == ssi->irq_rx) {
568 		strm->transfer(ssi, &ssi->capture);
569 		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
570 	}
571 
572 	return IRQ_HANDLED;
573 }
574 
575 static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
576 				   struct dma_chan *dma_ch, bool is_play)
577 {
578 	struct dma_slave_config cfg;
579 
580 	memset(&cfg, 0, sizeof(cfg));
581 
582 	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
583 	cfg.dst_addr = ssi->phys + SSIFTDR;
584 	cfg.src_addr = ssi->phys + SSIFRDR;
585 	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
586 	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
587 
588 	return dmaengine_slave_config(dma_ch, &cfg);
589 }
590 
591 static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
592 			       struct rz_ssi_stream *strm)
593 {
594 	struct snd_pcm_substream *substream = strm->substream;
595 	struct dma_async_tx_descriptor *desc;
596 	struct snd_pcm_runtime *runtime;
597 	enum dma_transfer_direction dir;
598 	u32 dma_paddr, dma_size;
599 	int amount;
600 
601 	if (!rz_ssi_stream_is_valid(ssi, strm))
602 		return -EINVAL;
603 
604 	runtime = substream->runtime;
605 	if (runtime->status->state == SNDRV_PCM_STATE_DRAINING)
606 		/*
607 		 * Stream is ending, so do not queue up any more DMA
608 		 * transfers otherwise we play partial sound clips
609 		 * because we can't shut off the DMA quick enough.
610 		 */
611 		return 0;
612 
613 	dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
614 
615 	/* Always transfer 1 period */
616 	amount = runtime->period_size;
617 
618 	/* DMA physical address and size */
619 	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
620 							strm->dma_buffer_pos);
621 	dma_size = frames_to_bytes(runtime, amount);
622 	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
623 					   dir,
624 					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
625 	if (!desc) {
626 		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
627 		return -ENOMEM;
628 	}
629 
630 	desc->callback = rz_ssi_dma_complete;
631 	desc->callback_param = strm;
632 
633 	if (dmaengine_submit(desc) < 0) {
634 		dev_err(ssi->dev, "dmaengine_submit() fail\n");
635 		return -EIO;
636 	}
637 
638 	/* Update DMA pointer */
639 	strm->dma_buffer_pos += amount;
640 	if (strm->dma_buffer_pos >= runtime->buffer_size)
641 		strm->dma_buffer_pos = 0;
642 
643 	/* Start DMA */
644 	dma_async_issue_pending(strm->dma_ch);
645 
646 	return 0;
647 }
648 
649 static void rz_ssi_dma_complete(void *data)
650 {
651 	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;
652 
653 	if (!strm->running || !strm->substream || !strm->substream->runtime)
654 		return;
655 
656 	/* Note that next DMA transaction has probably already started */
657 	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);
658 
659 	/* Queue up another DMA transaction */
660 	rz_ssi_dma_transfer(strm->priv, strm);
661 }
662 
663 static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
664 {
665 	if (ssi->playback.dma_ch) {
666 		dma_release_channel(ssi->playback.dma_ch);
667 		ssi->playback.dma_ch = NULL;
668 		if (ssi->dma_rt)
669 			ssi->dma_rt = false;
670 	}
671 
672 	if (ssi->capture.dma_ch) {
673 		dma_release_channel(ssi->capture.dma_ch);
674 		ssi->capture.dma_ch = NULL;
675 	}
676 }
677 
678 static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
679 {
680 	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
681 	if (IS_ERR(ssi->playback.dma_ch))
682 		ssi->playback.dma_ch = NULL;
683 
684 	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
685 	if (IS_ERR(ssi->capture.dma_ch))
686 		ssi->capture.dma_ch = NULL;
687 
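	/*
	 * Without dedicated "tx"/"rx" channels, fall back to a single "rt"
	 * channel that is reconfigured for the active direction at trigger
	 * time (see rz_ssi_dai_trigger()).
	 */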
688 	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
689 		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
690 		if (IS_ERR(ssi->playback.dma_ch)) {
691 			ssi->playback.dma_ch = NULL;
692 			goto no_dma;
693 		}
694 
695 		ssi->dma_rt = true;
696 	}
697 
698 	if (!rz_ssi_is_dma_enabled(ssi))
699 		goto no_dma;
700 
701 	if (ssi->playback.dma_ch &&
702 	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
703 		goto no_dma;
704 
705 	if (ssi->capture.dma_ch &&
706 	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
707 		goto no_dma;
708 
709 	return 0;
710 
711 no_dma:
712 	rz_ssi_release_dma_channels(ssi);
713 
714 	return -ENODEV;
715 }
716 
717 static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
718 			      struct snd_soc_dai *dai)
719 {
720 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
721 	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
722 	int ret = 0, i, num_transfer = 1;
723 
724 	switch (cmd) {
725 	case SNDRV_PCM_TRIGGER_START:
726 		/* Soft Reset */
727 		rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
728 		rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
729 		udelay(5);
730 
731 		ret = rz_ssi_stream_init(ssi, strm, substream);
732 		if (ret)
733 			goto done;
734 
735 		if (ssi->dma_rt) {
736 			bool is_playback;
737 
738 			is_playback = rz_ssi_stream_is_play(ssi, substream);
739 			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
740 						      is_playback);
741 			/* Fallback to pio */
742 			if (ret < 0) {
743 				ssi->playback.transfer = rz_ssi_pio_send;
744 				ssi->capture.transfer = rz_ssi_pio_recv;
745 				rz_ssi_release_dma_channels(ssi);
746 			}
747 		}
748 
749 		/* For DMA, queue up multiple DMA descriptors */
750 		if (rz_ssi_is_dma_enabled(ssi))
751 			num_transfer = 4;
752 
753 		for (i = 0; i < num_transfer; i++) {
754 			ret = strm->transfer(ssi, strm);
755 			if (ret)
756 				goto done;
757 		}
758 
759 		ret = rz_ssi_start(ssi, strm);
760 		break;
761 	case SNDRV_PCM_TRIGGER_STOP:
762 		rz_ssi_stop(ssi, strm);
763 		rz_ssi_stream_quit(ssi, strm);
764 		break;
765 	}
766 
767 done:
768 	return ret;
769 }
770 
771 static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
772 {
773 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
774 
775 	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
776 	case SND_SOC_DAIFMT_CBC_CFC:
777 		break;
778 	default:
779 		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
780 		return -EINVAL;
781 	}
782 
783 	/*
784 	 * set clock polarity
785 	 *
786 	 * "normal" BCLK = Signal is available at rising edge of BCLK
787 	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
788 	 */
789 	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
790 	case SND_SOC_DAIFMT_NB_NF:
791 		ssi->bckp_rise = false;
792 		ssi->lrckp_fsync_fall = false;
793 		break;
794 	case SND_SOC_DAIFMT_NB_IF:
795 		ssi->bckp_rise = false;
796 		ssi->lrckp_fsync_fall = true;
797 		break;
798 	case SND_SOC_DAIFMT_IB_NF:
799 		ssi->bckp_rise = true;
800 		ssi->lrckp_fsync_fall = false;
801 		break;
802 	case SND_SOC_DAIFMT_IB_IF:
803 		ssi->bckp_rise = true;
804 		ssi->lrckp_fsync_fall = true;
805 		break;
806 	default:
807 		return -EINVAL;
808 	}
809 
810 	/* only i2s support */
811 	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
812 	case SND_SOC_DAIFMT_I2S:
813 		break;
814 	default:
815 		dev_err(ssi->dev, "Only I2S mode is supported.\n");
816 		return -EINVAL;
817 	}
818 
819 	return 0;
820 }
821 
822 static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
823 				struct snd_pcm_hw_params *params,
824 				struct snd_soc_dai *dai)
825 {
826 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
827 	unsigned int sample_bits = hw_param_interval(params,
828 					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
829 	unsigned int channels = params_channels(params);
830 
831 	if (sample_bits != 16) {
832 		dev_err(ssi->dev, "Unsupported sample width: %d\n",
833 			sample_bits);
834 		return -EINVAL;
835 	}
836 
837 	if (channels != 2) {
838 		dev_err(ssi->dev, "Number of channels not matched: %d\n",
839 			channels);
840 		return -EINVAL;
841 	}
842 
843 	return rz_ssi_clk_setup(ssi, params_rate(params),
844 				params_channels(params));
845 }
846 
847 static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
848 	.trigger	= rz_ssi_dai_trigger,
849 	.set_fmt	= rz_ssi_dai_set_fmt,
850 	.hw_params	= rz_ssi_dai_hw_params,
851 };
852 
853 static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
854 	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
855 				  SNDRV_PCM_INFO_MMAP		|
856 				  SNDRV_PCM_INFO_MMAP_VALID,
857 	.buffer_bytes_max	= PREALLOC_BUFFER,
858 	.period_bytes_min	= 32,
859 	.period_bytes_max	= 8192,
860 	.channels_min		= SSI_CHAN_MIN,
861 	.channels_max		= SSI_CHAN_MAX,
862 	.periods_min		= 1,
863 	.periods_max		= 32,
864 	.fifo_size		= 32 * 2,
865 };
866 
867 static int rz_ssi_pcm_open(struct snd_soc_component *component,
868 			   struct snd_pcm_substream *substream)
869 {
870 	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);
871 
872 	return snd_pcm_hw_constraint_integer(substream->runtime,
873 					    SNDRV_PCM_HW_PARAM_PERIODS);
874 }
875 
876 static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
877 					    struct snd_pcm_substream *substream)
878 {
879 	struct snd_soc_dai *dai = rz_ssi_get_dai(substream);
880 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
881 	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
882 
883 	return strm->buffer_pos;
884 }
885 
886 static int rz_ssi_pcm_new(struct snd_soc_component *component,
887 			  struct snd_soc_pcm_runtime *rtd)
888 {
889 	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
890 				       rtd->card->snd_card->dev,
891 				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
892 	return 0;
893 }
894 
895 static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
896 	{
897 		.name			= "rz-ssi-dai",
898 		.playback = {
899 			.rates		= SSI_RATES,
900 			.formats	= SSI_FMTS,
901 			.channels_min	= SSI_CHAN_MIN,
902 			.channels_max	= SSI_CHAN_MAX,
903 		},
904 		.capture = {
905 			.rates		= SSI_RATES,
906 			.formats	= SSI_FMTS,
907 			.channels_min	= SSI_CHAN_MIN,
908 			.channels_max	= SSI_CHAN_MAX,
909 		},
910 		.ops = &rz_ssi_dai_ops,
911 	},
912 };
913 
914 static const struct snd_soc_component_driver rz_ssi_soc_component = {
915 	.name		= "rz-ssi",
916 	.open		= rz_ssi_pcm_open,
917 	.pointer	= rz_ssi_pcm_pointer,
918 	.pcm_construct	= rz_ssi_pcm_new,
919 };
920 
921 static int rz_ssi_probe(struct platform_device *pdev)
922 {
923 	struct rz_ssi_priv *ssi;
924 	struct clk *audio_clk;
925 	struct resource *res;
926 	int ret;
927 
928 	ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL);
929 	if (!ssi)
930 		return -ENOMEM;
931 
932 	ssi->pdev = pdev;
933 	ssi->dev = &pdev->dev;
934 	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
935 	if (IS_ERR(ssi->base))
936 		return PTR_ERR(ssi->base);
937 
938 	ssi->phys = res->start;
939 	ssi->clk = devm_clk_get(&pdev->dev, "ssi");
940 	if (IS_ERR(ssi->clk))
941 		return PTR_ERR(ssi->clk);
942 
943 	ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr");
944 	if (IS_ERR(ssi->sfr_clk))
945 		return PTR_ERR(ssi->sfr_clk);
946 
947 	audio_clk = devm_clk_get(&pdev->dev, "audio_clk1");
948 	if (IS_ERR(audio_clk))
949 		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
950 				     "no audio clk1");
951 
952 	ssi->audio_clk_1 = clk_get_rate(audio_clk);
953 	audio_clk = devm_clk_get(&pdev->dev, "audio_clk2");
954 	if (IS_ERR(audio_clk))
955 		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
956 				     "no audio clk2");
957 
958 	ssi->audio_clk_2 = clk_get_rate(audio_clk);
959 	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
960 		return dev_err_probe(&pdev->dev, -EINVAL,
961 				     "no audio clk1 or audio clk2");
962 
963 	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;
964 
965 	/* Detect DMA support */
966 	ret = rz_ssi_dma_request(ssi, &pdev->dev);
967 	if (ret < 0) {
968 		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
969 		ssi->playback.transfer = rz_ssi_pio_send;
970 		ssi->capture.transfer = rz_ssi_pio_recv;
971 	} else {
972 		dev_info(&pdev->dev, "DMA enabled\n");
973 		ssi->playback.transfer = rz_ssi_dma_transfer;
974 		ssi->capture.transfer = rz_ssi_dma_transfer;
975 	}
976 
977 	ssi->playback.priv = ssi;
978 	ssi->capture.priv = ssi;
979 
980 	spin_lock_init(&ssi->lock);
981 	dev_set_drvdata(&pdev->dev, ssi);
982 
983 	/* Error Interrupt */
984 	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
985 	if (ssi->irq_int < 0) {
986 		rz_ssi_release_dma_channels(ssi);
987 		return ssi->irq_int;
988 	}
989 
990 	ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt,
991 			       0, dev_name(&pdev->dev), ssi);
992 	if (ret < 0) {
993 		rz_ssi_release_dma_channels(ssi);
994 		return dev_err_probe(&pdev->dev, ret,
995 				     "irq request error (int_req)\n");
996 	}
997 
998 	if (!rz_ssi_is_dma_enabled(ssi)) {
999 		/* Tx and Rx interrupts (pio only) */
1000 		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
1001 		if (ssi->irq_tx < 0)
1002 			return ssi->irq_tx;
1003 
1004 		ret = devm_request_irq(&pdev->dev, ssi->irq_tx,
1005 				       &rz_ssi_interrupt, 0,
1006 				       dev_name(&pdev->dev), ssi);
1007 		if (ret < 0)
1008 			return dev_err_probe(&pdev->dev, ret,
1009 					     "irq request error (dma_tx)\n");
1010 
1011 		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
1012 		if (ssi->irq_rx < 0)
1013 			return ssi->irq_rx;
1014 
1015 		ret = devm_request_irq(&pdev->dev, ssi->irq_rx,
1016 				       &rz_ssi_interrupt, 0,
1017 				       dev_name(&pdev->dev), ssi);
1018 		if (ret < 0)
1019 			return dev_err_probe(&pdev->dev, ret,
1020 					     "irq request error (dma_rx)\n");
1021 	}
1022 
1023 	ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
1024 	if (IS_ERR(ssi->rstc)) {
1025 		ret = PTR_ERR(ssi->rstc);
1026 		goto err_reset;
1027 	}
1028 
1029 	reset_control_deassert(ssi->rstc);
1030 	pm_runtime_enable(&pdev->dev);
1031 	ret = pm_runtime_resume_and_get(&pdev->dev);
1032 	if (ret < 0) {
1033 		dev_err(&pdev->dev, "pm_runtime_resume_and_get failed\n");
1034 		goto err_pm;
1035 	}
1036 
1037 	ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component,
1038 					      rz_ssi_soc_dai,
1039 					      ARRAY_SIZE(rz_ssi_soc_dai));
1040 	if (ret < 0) {
1041 		dev_err(&pdev->dev, "failed to register snd component\n");
1042 		goto err_snd_soc;
1043 	}
1044 
1045 	return 0;
1046 
1047 err_snd_soc:
1048 	pm_runtime_put(ssi->dev);
1049 err_pm:
1050 	pm_runtime_disable(ssi->dev);
1051 	reset_control_assert(ssi->rstc);
1052 err_reset:
1053 	rz_ssi_release_dma_channels(ssi);
1054 
1055 	return ret;
1056 }
1057 
1058 static int rz_ssi_remove(struct platform_device *pdev)
1059 {
1060 	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);
1061 
1062 	rz_ssi_release_dma_channels(ssi);
1063 
1064 	pm_runtime_put(ssi->dev);
1065 	pm_runtime_disable(ssi->dev);
1066 	reset_control_assert(ssi->rstc);
1067 
1068 	return 0;
1069 }
1070 
1071 static const struct of_device_id rz_ssi_of_match[] = {
1072 	{ .compatible = "renesas,rz-ssi", },
1073 	{/* Sentinel */},
1074 };
1075 MODULE_DEVICE_TABLE(of, rz_ssi_of_match);
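
/*
 * Illustrative device-tree node sketch, assembled from the resource names the
 * probe routine above requests (clock-names "ssi", "ssi_sfr", "audio_clk1",
 * "audio_clk2"; interrupt-names "int_req", "dma_tx", "dma_rx"; dma-names
 * "tx"/"rx" or "rt"). The unit address, interrupt specifiers and phandles are
 * placeholders, not taken from this file; the authoritative format is the DT
 * binding document, not this sketch.
 *
 *	ssi0: ssi@10049c00 {
 *		compatible = "renesas,rz-ssi";
 *		reg = <0x10049c00 0x400>;
 *		interrupts = <...>, <...>, <...>;
 *		interrupt-names = "int_req", "dma_tx", "dma_rx";
 *		clocks = <&cpg ...>, <&cpg ...>, <&audio_clk1>, <&audio_clk2>;
 *		clock-names = "ssi", "ssi_sfr", "audio_clk1", "audio_clk2";
 *		resets = <&cpg ...>;
 *		dmas = <&dmac ...>, <&dmac ...>;
 *		dma-names = "tx", "rx";
 *	};
 */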
1076 
1077 static struct platform_driver rz_ssi_driver = {
1078 	.driver	= {
1079 		.name	= "rz-ssi-pcm-audio",
1080 		.of_match_table = rz_ssi_of_match,
1081 	},
1082 	.probe		= rz_ssi_probe,
1083 	.remove		= rz_ssi_remove,
1084 };
1085 
1086 module_platform_driver(rz_ssi_driver);
1087 
1088 MODULE_LICENSE("GPL v2");
1089 MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
1090 MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");
1091