// SPDX-License-Identifier: GPL-2.0
//
// Freescale ASRC ALSA SoC Platform (DMA) driver
//
// Copyright (C) 2014 Freescale Semiconductor, Inc.
//
// Author: Nicolin Chen <nicoleotsuka@gmail.com>

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/platform_data/dma-imx.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_params.h>

#include "fsl_asrc_common.h"

#define FSL_ASRC_DMABUF_SIZE	(256 * 1024)

static struct snd_pcm_hardware snd_imx_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max = FSL_ASRC_DMABUF_SIZE,
	.period_bytes_min = 128,
	.period_bytes_max = 65535, /* Limited by SDMA engine */
	.periods_min = 2,
	.periods_max = 255,
	.fifo_size = 0,
};

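/*
 * Filter callback for dma_request_channel(): accept any general purpose
 * SDMA channel and attach the imx_dma_data prepared in hw_params() via
 * chan->private, so the SDMA driver can pick up the ASRC and Back-End
 * DMA request lines.
 */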
static bool filter(struct dma_chan *chan, void *param)
{
	if (!imx_dma_is_general_purpose(chan))
		return false;

	chan->private = param;

	return true;
}

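/*
 * Front-End DMA completion callback: advance the software pointer by one
 * period, wrap it at the buffer size, and notify ALSA that a period has
 * elapsed.
 */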
static void fsl_asrc_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	pair->pos += snd_pcm_lib_period_bytes(substream);
	if (pair->pos >= snd_pcm_lib_buffer_bytes(substream))
		pair->pos = 0;

	snd_pcm_period_elapsed(substream);
}

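/*
 * Prepare and submit both cyclic transfers of an ASRC pair:
 *  - Front-End: between the audio buffer in memory and the ASRC FIFO,
 *    driving period notifications via fsl_asrc_dma_complete();
 *  - Back-End: a DEV_TO_DEV transfer between the ASRC FIFO and the
 *    Back-End peripheral FIFO; the address and length arguments here are
 *    placeholders, as the real FIFO addresses were already programmed by
 *    dmaengine_slave_config() in hw_params().
 */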
static int fsl_asrc_dma_prepare_and_submit(struct snd_pcm_substream *substream,
					   struct snd_soc_component *component)
{
	u8 dir = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? OUT : IN;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct device *dev = component->dev;
	unsigned long flags = DMA_CTRL_ACK;

	/* Prepare and submit Front-End DMA channel */
	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	pair->pos = 0;
	pair->desc[!dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[!dir], runtime->dma_addr,
			snd_pcm_lib_buffer_bytes(substream),
			snd_pcm_lib_period_bytes(substream),
			dir == OUT ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM, flags);
	if (!pair->desc[!dir]) {
		dev_err(dev, "failed to prepare slave DMA for Front-End\n");
		return -ENOMEM;
	}

	pair->desc[!dir]->callback = fsl_asrc_dma_complete;
	pair->desc[!dir]->callback_param = substream;

	dmaengine_submit(pair->desc[!dir]);

	/* Prepare and submit Back-End DMA channel */
	pair->desc[dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[dir], 0xffff, 64, 64, DMA_DEV_TO_DEV, 0);
	if (!pair->desc[dir]) {
		dev_err(dev, "failed to prepare slave DMA for Back-End\n");
		return -ENOMEM;
	}

	dmaengine_submit(pair->desc[dir]);

	return 0;
}

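/*
 * PCM trigger: on START/RESUME/PAUSE_RELEASE (re)submit both descriptors
 * and kick the IN and OUT channels; on STOP/SUSPEND/PAUSE_PUSH terminate
 * both channels.
 */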
static int fsl_asrc_dma_trigger(struct snd_soc_component *component,
				struct snd_pcm_substream *substream, int cmd)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		ret = fsl_asrc_dma_prepare_and_submit(substream, component);
		if (ret)
			return ret;
		dma_async_issue_pending(pair->dma_chan[IN]);
		dma_async_issue_pending(pair->dma_chan[OUT]);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		dmaengine_terminate_all(pair->dma_chan[OUT]);
		dmaengine_terminate_all(pair->dma_chan[IN]);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

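/*
 * hw_params: set up both halves of the ASRC pair.
 *
 * The Front-End dmaengine data of the CPU DAI is redirected to the ASRC
 * FIFO, then the Front-End channel is obtained from the ASRC driver and
 * configured from the PCM parameters. The Back-End channel carries the
 * DEV_TO_DEV transfer between the ASRC FIFO and the Back-End peripheral
 * FIFO: for SDMA the two DMA request lines are collected and a channel is
 * requested through filter(); for EDMA the Back-End channel is used
 * directly, reusing the one already held by the Back-End component when
 * available.
 */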
static int fsl_asrc_dma_hw_params(struct snd_soc_component *component,
				  struct snd_pcm_substream *substream,
				  struct snd_pcm_hw_params *params)
{
	enum dma_slave_buswidth buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_dmaengine_dai_dma_data *dma_params_fe = NULL;
	struct snd_dmaengine_dai_dma_data *dma_params_be = NULL;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct dma_chan *tmp_chan = NULL, *be_chan = NULL;
	struct snd_soc_component *component_be = NULL;
	struct fsl_asrc *asrc = pair->asrc;
	struct dma_slave_config config_fe, config_be;
	enum asrc_pair_index index = pair->index;
	struct device *dev = component->dev;
	int stream = substream->stream;
	struct imx_dma_data *tmp_data;
	struct snd_soc_dpcm *dpcm;
	struct device *dev_be;
	u8 dir = tx ? OUT : IN;
	dma_cap_mask_t mask;
	int ret, width;

	/* Fetch the Back-End dma_data from DPCM */
	for_each_dpcm_be(rtd, stream, dpcm) {
		struct snd_soc_pcm_runtime *be = dpcm->be;
		struct snd_pcm_substream *substream_be;
		struct snd_soc_dai *dai = asoc_rtd_to_cpu(be, 0);

		if (dpcm->fe != rtd)
			continue;

		substream_be = snd_soc_dpcm_get_substream(be, stream);
		dma_params_be = snd_soc_dai_get_dma_data(dai, substream_be);
		dev_be = dai->dev;
		break;
	}

	if (!dma_params_be) {
		dev_err(dev, "failed to get the substream of Back-End\n");
		return -EINVAL;
	}

	/* Override dma_data of the Front-End and config its dmaengine */
	dma_params_fe = snd_soc_dai_get_dma_data(asoc_rtd_to_cpu(rtd, 0), substream);
	dma_params_fe->addr = asrc->paddr + asrc->get_fifo_addr(!dir, index);
	dma_params_fe->maxburst = dma_params_be->maxburst;

	pair->dma_chan[!dir] = asrc->get_dma_channel(pair, !dir);
	if (!pair->dma_chan[!dir]) {
		dev_err(dev, "failed to request DMA channel\n");
		return -EINVAL;
	}

	memset(&config_fe, 0, sizeof(config_fe));
	ret = snd_dmaengine_pcm_prepare_slave_config(substream, params, &config_fe);
	if (ret) {
		dev_err(dev, "failed to prepare DMA config for Front-End\n");
		return ret;
	}

	ret = dmaengine_slave_config(pair->dma_chan[!dir], &config_fe);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Front-End\n");
		return ret;
	}

	/* Request and config DMA channel for Back-End */
	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	dma_cap_set(DMA_CYCLIC, mask);

	/*
	 * The Back-End device might have already requested a DMA channel,
	 * so try to reuse it first, and then request a new one upon NULL.
	 */
	component_be = snd_soc_lookup_component_nolocked(dev_be, SND_DMAENGINE_PCM_DRV_NAME);
	if (component_be) {
		be_chan = soc_component_to_pcm(component_be)->chan[substream->stream];
		tmp_chan = be_chan;
	}
	if (!tmp_chan)
		tmp_chan = dma_request_slave_channel(dev_be, tx ? "tx" : "rx");

	/*
	 * An EDMA DEV_TO_DEV channel is fixed and bound with DMA event of each
	 * peripheral, unlike SDMA channel that is allocated dynamically. So no
	 * need to configure dma_request and dma_request2, but get dma_chan of
	 * Back-End device directly via dma_request_slave_channel.
	 */
	if (!asrc->use_edma) {
		/* Get DMA request of Back-End */
		tmp_data = tmp_chan->private;
		pair->dma_data.dma_request = tmp_data->dma_request;
		if (!be_chan)
			dma_release_channel(tmp_chan);

		/* Get DMA request of Front-End */
		tmp_chan = asrc->get_dma_channel(pair, dir);
		tmp_data = tmp_chan->private;
		pair->dma_data.dma_request2 = tmp_data->dma_request;
		pair->dma_data.peripheral_type = tmp_data->peripheral_type;
		pair->dma_data.priority = tmp_data->priority;
		dma_release_channel(tmp_chan);

		pair->dma_chan[dir] =
			dma_request_channel(mask, filter, &pair->dma_data);
		pair->req_dma_chan = true;
	} else {
		pair->dma_chan[dir] = tmp_chan;
		/* Do not flag to release if we are reusing the Back-End one */
		pair->req_dma_chan = !be_chan;
	}

	if (!pair->dma_chan[dir]) {
		dev_err(dev, "failed to request DMA channel for Back-End\n");
		return -EINVAL;
	}

	width = snd_pcm_format_physical_width(asrc->asrc_format);
	if (width < 8 || width > 64)
		return -EINVAL;
	else if (width == 8)
		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
	else if (width == 16)
		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	else if (width == 24)
		buswidth = DMA_SLAVE_BUSWIDTH_3_BYTES;
	else if (width <= 32)
		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
	else
		buswidth = DMA_SLAVE_BUSWIDTH_8_BYTES;

	config_be.direction = DMA_DEV_TO_DEV;
	config_be.src_addr_width = buswidth;
	config_be.src_maxburst = dma_params_be->maxburst;
	config_be.dst_addr_width = buswidth;
	config_be.dst_maxburst = dma_params_be->maxburst;

	if (tx) {
		config_be.src_addr = asrc->paddr + asrc->get_fifo_addr(OUT, index);
		config_be.dst_addr = dma_params_be->addr;
	} else {
		config_be.dst_addr = asrc->paddr + asrc->get_fifo_addr(IN, index);
		config_be.src_addr = dma_params_be->addr;
	}

	ret = dmaengine_slave_config(pair->dma_chan[dir], &config_be);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Back-End\n");
		if (pair->req_dma_chan)
			dma_release_channel(pair->dma_chan[dir]);
		return ret;
	}

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	return 0;
}

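/*
 * hw_free: drop the runtime buffer and release the Front-End channel;
 * release the Back-End DEV_TO_DEV channel only if it was requested here
 * rather than borrowed from the Back-End component.
 */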
static int fsl_asrc_dma_hw_free(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	u8 dir = tx ? OUT : IN;

	snd_pcm_set_runtime_buffer(substream, NULL);

	if (pair->dma_chan[!dir])
		dma_release_channel(pair->dma_chan[!dir]);

	/* release dev_to_dev chan if we aren't reusing the Back-End one */
	if (pair->dma_chan[dir] && pair->req_dma_chan)
		dma_release_channel(pair->dma_chan[dir]);

	pair->dma_chan[!dir] = NULL;
	pair->dma_chan[dir] = NULL;

	return 0;
}

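/*
 * PCM open: allocate the pair state used as runtime private data and
 * constrain periods to integer counts. A dummy ASRC pair and DMA channel
 * are requested only so snd_dmaengine_pcm_refine_runtime_hwparams() can
 * query the DMA engine capabilities; both are released again before
 * returning.
 */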
static int fsl_asrc_dma_startup(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_dmaengine_dai_dma_data *dma_data;
	struct device *dev = component->dev;
	struct fsl_asrc *asrc = dev_get_drvdata(dev);
	struct fsl_asrc_pair *pair;
	struct dma_chan *tmp_chan = NULL;
	u8 dir = tx ? OUT : IN;
	bool release_pair = true;
	int ret = 0;

	ret = snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(dev, "failed to set pcm hw params periods\n");
		return ret;
	}

	pair = kzalloc(sizeof(*pair) + asrc->pair_priv_size, GFP_KERNEL);
	if (!pair)
		return -ENOMEM;

	pair->asrc = asrc;
	pair->private = (void *)pair + sizeof(struct fsl_asrc_pair);

	runtime->private_data = pair;

	/* Request a dummy pair, which will be released later.
	 * Request pair function needs channel num as input, for this
	 * dummy pair, we just request "1" channel temporarily.
	 */
	ret = asrc->request_pair(1, pair);
	if (ret < 0) {
		dev_err(dev, "failed to request asrc pair\n");
		goto req_pair_err;
	}

	/* Request a dummy dma channel, which will be released later. */
	tmp_chan = asrc->get_dma_channel(pair, dir);
	if (!tmp_chan) {
		dev_err(dev, "failed to get dma channel\n");
		ret = -EINVAL;
		goto dma_chan_err;
	}

	dma_data = snd_soc_dai_get_dma_data(asoc_rtd_to_cpu(rtd, 0), substream);

	/* Refine the snd_imx_hardware according to caps of DMA. */
	ret = snd_dmaengine_pcm_refine_runtime_hwparams(substream,
							dma_data,
							&snd_imx_hardware,
							tmp_chan);
	if (ret < 0) {
		dev_err(dev, "failed to refine runtime hwparams\n");
		goto out;
	}

	release_pair = false;
	snd_soc_set_runtime_hwparams(substream, &snd_imx_hardware);

out:
	dma_release_channel(tmp_chan);

dma_chan_err:
	asrc->release_pair(pair);

req_pair_err:
	if (release_pair)
		kfree(pair);

	return ret;
}

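/* PCM close: detach the pair from the ASRC instance and free it. */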
static int fsl_asrc_dma_shutdown(struct snd_soc_component *component,
				 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct fsl_asrc *asrc;

	if (!pair)
		return 0;

	asrc = pair->asrc;

	if (asrc->pair[pair->index] == pair)
		asrc->pair[pair->index] = NULL;

	kfree(pair);

	return 0;
}

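/*
 * Report the current position from the software counter maintained by the
 * Front-End completion callback, converted from bytes to frames.
 */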
static snd_pcm_uframes_t
fsl_asrc_dma_pcm_pointer(struct snd_soc_component *component,
			 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	return bytes_to_frames(substream->runtime, pair->pos);
}

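/*
 * Allocate one FSL_ASRC_DMABUF_SIZE DMA buffer per substream at PCM
 * construction time, after coercing the card to a 32-bit DMA mask.
 */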
static int fsl_asrc_dma_pcm_new(struct snd_soc_component *component,
				struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm_substream *substream;
	struct snd_pcm *pcm = rtd->pcm;
	int ret, i;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret) {
		dev_err(card->dev, "failed to set DMA mask\n");
		return ret;
	}

	for_each_pcm_streams(i) {
		substream = pcm->streams[i].substream;
		if (!substream)
			continue;

		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, pcm->card->dev,
					  FSL_ASRC_DMABUF_SIZE, &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev, "failed to allocate DMA buffer\n");
			goto err;
		}
	}

	return 0;

err:
	if (--i == 0 && pcm->streams[i].substream)
		snd_dma_free_pages(&pcm->streams[i].substream->dma_buffer);

	return ret;
}

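/* Free the per-substream DMA buffers allocated in fsl_asrc_dma_pcm_new(). */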
static void fsl_asrc_dma_pcm_free(struct snd_soc_component *component,
				  struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	int i;

	for_each_pcm_streams(i) {
		substream = pcm->streams[i].substream;
		if (!substream)
			continue;

		snd_dma_free_pages(&substream->dma_buffer);
		substream->dma_buffer.area = NULL;
		substream->dma_buffer.addr = 0;
	}
}

struct snd_soc_component_driver fsl_asrc_component = {
	.name		= DRV_NAME,
	.hw_params	= fsl_asrc_dma_hw_params,
	.hw_free	= fsl_asrc_dma_hw_free,
	.trigger	= fsl_asrc_dma_trigger,
	.open		= fsl_asrc_dma_startup,
	.close		= fsl_asrc_dma_shutdown,
	.pointer	= fsl_asrc_dma_pcm_pointer,
	.pcm_construct	= fsl_asrc_dma_pcm_new,
	.pcm_destruct	= fsl_asrc_dma_pcm_free,
};
EXPORT_SYMBOL_GPL(fsl_asrc_component);