// SPDX-License-Identifier: GPL-2.0
/*
 * ALSA SoC Audio Layer - Rockchip Multi-DAIS-PCM driver
 *
 * Copyright (c) 2018 Rockchip Electronics Co. Ltd.
 * Author: Sugar Zhang <sugar.zhang@rock-chips.com>
 *
 */

#include <linux/module.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include "rockchip_multi_dais.h"

#define MAX_FIFO_SIZE	32 /* max fifo size in frames */
#define SND_DMAENGINE_MPCM_DRV_NAME "snd_dmaengine_mpcm"

struct dmaengine_mpcm {
	struct rk_mdais_dev *mdais;
	struct dma_chan *tx_chans[MAX_DAIS];
	struct dma_chan *rx_chans[MAX_DAIS];
	struct snd_soc_component component;
};

struct dmaengine_mpcm_runtime_data {
	struct dma_chan *chans[MAX_DAIS];
	dma_cookie_t cookies[MAX_DAIS];
	unsigned int *channel_maps;
	int num_chans;
	unsigned int pos;
	unsigned int master_chan;
	bool start_flag;
#ifdef CONFIG_SND_SOC_ROCKCHIP_VAD
	unsigned int vpos;
	unsigned int vresidue_bytes;
#endif
};

static inline struct dmaengine_mpcm_runtime_data *substream_to_prtd(
	const struct snd_pcm_substream *substream)
{
	return substream->runtime->private_data;
}

static struct dmaengine_mpcm *soc_component_to_mpcm(struct snd_soc_component *p)
{
	return container_of(p, struct dmaengine_mpcm, component);
}

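/*
 * Return the first DMA channel allocated for the substream's direction;
 * it is only used to look up the DMA device and its capabilities.
 */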
static struct dma_chan *to_chan(struct dmaengine_mpcm *pcm,
				struct snd_pcm_substream *substream)
{
	struct dma_chan *chan = NULL;
	int i;

	for (i = 0; i < pcm->mdais->num_dais; i++) {
		chan = substream->stream ? pcm->rx_chans[i] : pcm->tx_chans[i];
		if (chan)
			break;
	}

	return chan;
}

static struct device *dmaengine_dma_dev(struct dmaengine_mpcm *pcm,
					struct snd_pcm_substream *substream)
{
	struct dma_chan *chan;

	chan = to_chan(pcm, substream);
	if (!chan)
		return NULL;

	return chan->device->dev;
}

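/*
 * Fill in the per-direction slave_config fields (FIFO address, bus width,
 * slave id) from the DAI's snd_dmaengine_dai_dma_data.
 */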
static void snd_dmaengine_mpcm_set_config_from_dai_data(
	const struct snd_pcm_substream *substream,
	const struct snd_dmaengine_dai_dma_data *dma_data,
	struct dma_slave_config *slave_config)
{
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		slave_config->dst_addr = dma_data->addr;
		if (dma_data->addr_width != DMA_SLAVE_BUSWIDTH_UNDEFINED)
			slave_config->dst_addr_width = dma_data->addr_width;
	} else {
		slave_config->src_addr = dma_data->addr;
		if (dma_data->addr_width != DMA_SLAVE_BUSWIDTH_UNDEFINED)
			slave_config->src_addr_width = dma_data->addr_width;
	}

	slave_config->slave_id = dma_data->slave_id;
}

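/*
 * Period-elapsed callback of the cyclic (master) transfer.  With VAD
 * enabled, the just-filled capture period is handed to the VAD
 * preprocessor before user space is woken up.
 */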
static void dmaengine_mpcm_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
#ifdef CONFIG_SND_SOC_ROCKCHIP_VAD
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);

	if (snd_pcm_vad_attached(substream) &&
	    substream->stream == SNDRV_PCM_STREAM_CAPTURE) {
		void *buf = substream->runtime->dma_area + prtd->pos;

		snd_pcm_vad_preprocess(substream, buf,
				       substream->runtime->period_size);
	}

	prtd->pos += snd_pcm_lib_period_bytes(substream);
	if (prtd->pos >= snd_pcm_lib_buffer_bytes(substream))
		prtd->pos = 0;

#endif
	snd_pcm_period_elapsed(substream);
}

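/*
 * Pick the highest-indexed DAI that actually has a DMA channel as the
 * "master" channel: its cookie is the one queried in the pointer op and
 * its descriptor carries the period-elapsed callback.
 */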
static void dmaengine_mpcm_get_master_chan(struct dmaengine_mpcm_runtime_data *prtd)
{
	int i;

	for (i = prtd->num_chans; i > 0; i--) {
		if (prtd->chans[i - 1]) {
			prtd->master_chan = i - 1;
			break;
		}
	}
}

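/*
 * Prepare one cyclic DMA descriptor per DAI.  Every DAI transfers the
 * whole ring buffer but starts at a byte offset that skips the samples
 * handled by the preceding DAIs, so the interleaved frame is split
 * across the devices.  Only the last submitted descriptor gets the
 * period-elapsed callback.
 */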
static int dmaengine_mpcm_prepare_and_submit(struct snd_pcm_substream *substream)
{
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct dma_async_tx_descriptor *desc = NULL;
	enum dma_transfer_direction direction;
	unsigned long flags = DMA_CTRL_ACK;
	unsigned int *maps = prtd->channel_maps;
	int offset, buffer_bytes, period_bytes;
	int i;

	direction = snd_pcm_substream_to_dma_direction(substream);

	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	prtd->pos = 0;
	offset = 0;
	period_bytes = snd_pcm_lib_period_bytes(substream);
	buffer_bytes = snd_pcm_lib_buffer_bytes(substream);
	for (i = 0; i < prtd->num_chans; i++) {
		if (!prtd->chans[i])
			continue;
		desc = dmaengine_prep_dma_cyclic(prtd->chans[i],
						 runtime->dma_addr + offset,
						 buffer_bytes, period_bytes,
						 direction, flags);

		if (!desc)
			return -ENOMEM;

		prtd->cookies[i] = dmaengine_submit(desc);
		offset += samples_to_bytes(runtime, maps[i]);
	}

	if (desc) {
		desc->callback = dmaengine_mpcm_dma_complete;
		desc->callback_param = substream;
		dmaengine_mpcm_get_master_chan(prtd);
	} else {
		return -ENOMEM;
	}

	return 0;
}

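/* Fan the dmaengine start/pause/resume/terminate calls out to every DAI channel. */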
static void mpcm_dma_async_issue_pending(struct dmaengine_mpcm_runtime_data *prtd)
{
	int i;

	for (i = 0; i < prtd->num_chans; i++) {
		if (prtd->chans[i])
			dma_async_issue_pending(prtd->chans[i]);
	}
}

static void mpcm_dmaengine_resume(struct dmaengine_mpcm_runtime_data *prtd)
{
	int i;

	for (i = 0; i < prtd->num_chans; i++) {
		if (prtd->chans[i])
			dmaengine_resume(prtd->chans[i]);
	}
}

static void mpcm_dmaengine_pause(struct dmaengine_mpcm_runtime_data *prtd)
{
	int i;

	for (i = 0; i < prtd->num_chans; i++) {
		if (prtd->chans[i])
			dmaengine_pause(prtd->chans[i]);
	}
}

static void mpcm_dmaengine_terminate_all(struct dmaengine_mpcm_runtime_data *prtd)
{
	int i;

	for (i = 0; i < prtd->num_chans; i++) {
		if (prtd->chans[i])
			dmaengine_terminate_all(prtd->chans[i]);
	}
}

#ifdef CONFIG_SND_SOC_ROCKCHIP_VAD
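/*
 * Completion callback for the one-shot transfers submitted when a capture
 * stream starts while the VAD already holds buffered data: each completed
 * chunk is fed to the VAD preprocessor, the VAD position is advanced and
 * an elapsed period is reported.
 */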
static void dmaengine_mpcm_single_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);
	unsigned int pos, size;
	void *buf;

	if (snd_pcm_vad_attached(substream) &&
	    substream->stream == SNDRV_PCM_STREAM_CAPTURE) {
		buf = substream->runtime->dma_area + prtd->vpos;
		pos = prtd->vpos + snd_pcm_lib_period_bytes(substream);

		if (pos <= snd_pcm_lib_buffer_bytes(substream))
			size = substream->runtime->period_size;
		else
			size = bytes_to_frames(substream->runtime,
					       prtd->vresidue_bytes);
		snd_pcm_vad_preprocess(substream, buf, size);
	}

	prtd->vpos += snd_pcm_lib_period_bytes(substream);
	if (prtd->vpos >= snd_pcm_lib_buffer_bytes(substream))
		prtd->vpos = 0;
	snd_pcm_period_elapsed(substream);
}

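/*
 * Submit one slave_single descriptor per DAI for a single chunk of the
 * buffer, applying the same per-DAI byte offsets as the cyclic path.
 * The completion callback is attached to the first prepared descriptor
 * only.
 */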
static int __mpcm_prepare_single_and_submit(struct snd_pcm_substream *substream,
					    dma_addr_t buf_start, int size)
{
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct dma_async_tx_descriptor *desc;
	enum dma_transfer_direction direction;
	unsigned long flags = DMA_CTRL_ACK;
	unsigned int *maps = prtd->channel_maps;
	int offset, i;
	bool callback = false;

	direction = snd_pcm_substream_to_dma_direction(substream);

	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	offset = 0;
	for (i = 0; i < prtd->num_chans; i++) {
		if (!prtd->chans[i])
			continue;
		desc = dmaengine_prep_slave_single(prtd->chans[i],
						   buf_start + offset,
						   size,
						   direction, flags);

		if (!desc)
			return -ENOMEM;
		if (!callback) {
			desc->callback = dmaengine_mpcm_single_dma_complete;
			desc->callback_param = substream;
			callback = true;
		}
		dmaengine_submit(desc);
		offset += samples_to_bytes(runtime, maps[i]);
	}

	return 0;
}

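/*
 * Cover the part of the ring buffer beyond the data already buffered by
 * the VAD with one-shot transfers: full periods first, then one final
 * descriptor for the residue that does not fill a whole period.
 */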
static int dmaengine_mpcm_prepare_single_and_submit(struct snd_pcm_substream *substream)
{
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);
	enum dma_transfer_direction direction;
	unsigned long flags = DMA_CTRL_ACK;
	snd_pcm_uframes_t avail;
	dma_addr_t buf_start, buf_end;
	int offset, i, count, ret;
	int buffer_bytes, period_bytes, residue_bytes;

	direction = snd_pcm_substream_to_dma_direction(substream);

	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	period_bytes = snd_pcm_lib_period_bytes(substream);
	buffer_bytes = snd_pcm_lib_buffer_bytes(substream);
	avail = snd_pcm_vad_avail(substream);
	offset = frames_to_bytes(substream->runtime, avail);
	prtd->vpos = offset;
	buf_start = substream->runtime->dma_addr + offset;
	buf_end = substream->runtime->dma_addr + snd_pcm_lib_buffer_bytes(substream);
	count = (buf_end - buf_start) / period_bytes;
	residue_bytes = (buf_end - buf_start) % period_bytes;
	prtd->vresidue_bytes = residue_bytes;
	pr_debug("%s: offset: %d, buffer_bytes: %d\n", __func__, offset, buffer_bytes);
	pr_debug("%s: count: %d, residue_bytes: %d\n", __func__, count, residue_bytes);
	for (i = 0; i < count; i++) {
		ret = __mpcm_prepare_single_and_submit(substream, buf_start,
						       period_bytes);
		if (ret)
			return ret;
		buf_start += period_bytes;
	}

	if (residue_bytes) {
		ret = __mpcm_prepare_single_and_submit(substream, buf_start,
						       residue_bytes);
		if (ret)
			return ret;
	}

	return 0;
}
#endif

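/*
 * PCM trigger: on START submit the cyclic descriptors (preceded, for a
 * VAD capture stream with buffered data, by the one-shot catch-up
 * transfers) and kick the channels; SUSPEND/PAUSE/STOP pause or
 * terminate all channels and clear start_flag so the pointer op
 * re-applies the FIFO correction.
 */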
static int snd_dmaengine_mpcm_trigger(struct snd_soc_component *component,
				      struct snd_pcm_substream *substream, int cmd)
{
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
#ifdef CONFIG_SND_SOC_ROCKCHIP_VAD
		if (substream->stream == SNDRV_PCM_STREAM_CAPTURE &&
		    snd_pcm_vad_attached(substream) &&
		    snd_pcm_vad_avail(substream)) {
			dmaengine_mpcm_prepare_single_and_submit(substream);
			mpcm_dma_async_issue_pending(prtd);
		}
#endif
		ret = dmaengine_mpcm_prepare_and_submit(substream);
		if (ret)
			return ret;
		mpcm_dma_async_issue_pending(prtd);
		break;
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		mpcm_dmaengine_resume(prtd);
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
		if (runtime->info & SNDRV_PCM_INFO_PAUSE)
			mpcm_dmaengine_pause(prtd);
		else
			mpcm_dmaengine_terminate_all(prtd);
		prtd->start_flag = false;
		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		mpcm_dmaengine_pause(prtd);
		prtd->start_flag = false;
		break;
	case SNDRV_PCM_TRIGGER_STOP:
		mpcm_dmaengine_terminate_all(prtd);
		prtd->start_flag = false;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

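/*
 * Configure every DAI's DMA channel from the hw_params: start from the
 * generic snd_hwparams_to_dma_slave_config() result, apply the DAI's
 * FIFO address/width, and (non-GKI builds only) program the interlace
 * size and burst so each DAI moves just its own slice of the
 * interleaved frame.
 */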
static int dmaengine_mpcm_hw_params(struct snd_soc_component *component,
				    struct snd_pcm_substream *substream,
				    struct snd_pcm_hw_params *params)
{
	struct dmaengine_mpcm *pcm = soc_component_to_mpcm(component);
	struct dma_chan *chan;
	struct snd_dmaengine_dai_dma_data *dma_data;
	struct dma_slave_config slave_config;
	snd_pcm_format_t format;
	unsigned int *maps;
	int frame_bytes;
	int ret, num, i, sz;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		maps = pcm->mdais->playback_channel_maps;
	else
		maps = pcm->mdais->capture_channel_maps;
	format = params_format(params);
	frame_bytes = snd_pcm_format_size(format, params_channels(params));
	num = pcm->mdais->num_dais;

	for (i = 0; i < num; i++) {
		memset(&slave_config, 0, sizeof(slave_config));
		ret = snd_hwparams_to_dma_slave_config(substream, params,
						       &slave_config);
		if (ret)
			return ret;

		dma_data = snd_soc_dai_get_dma_data(pcm->mdais->dais[i].dai,
						    substream);
		if (!dma_data)
			continue;

		snd_dmaengine_mpcm_set_config_from_dai_data(substream,
							    dma_data,
							    &slave_config);

		/* refine params for interlace access */
		sz = snd_pcm_format_size(format, maps[i]);
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			chan = pcm->tx_chans[i];
#ifdef CONFIG_NO_GKI
			if (sz) {
				slave_config.src_interlace_size = frame_bytes - sz;
				if (slave_config.src_interlace_size)
					slave_config.dst_maxburst = sz / slave_config.dst_addr_width;
			}
#endif
		} else {
			chan = pcm->rx_chans[i];
#ifdef CONFIG_NO_GKI
			if (sz) {
				slave_config.dst_interlace_size = frame_bytes - sz;
				if (slave_config.dst_interlace_size)
					slave_config.src_maxburst = sz / slave_config.src_addr_width;
			}
#endif
		}
		if (!chan)
			continue;

		ret = dmaengine_slave_config(chan, &slave_config);
		if (ret)
			return ret;
	}
	return snd_pcm_lib_malloc_pages(substream, params_buffer_bytes(params));
}

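/*
 * Build the snd_pcm_hardware description from the DMA engine's slave
 * capabilities (pause/resume support, residue granularity, supported
 * bus widths) and hand it to the ALSA core.
 */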
static int dmaengine_mpcm_set_runtime_hwparams(struct snd_pcm_substream *substream)
{
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, SND_DMAENGINE_MPCM_DRV_NAME);
	struct dmaengine_mpcm *pcm = soc_component_to_mpcm(component);
	struct device *dma_dev = dmaengine_dma_dev(pcm, substream);
	struct dma_chan *chan;
	struct dma_slave_caps dma_caps;
	struct snd_pcm_hardware hw;
	u32 addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
			  BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
			  BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	snd_pcm_format_t i;
	int ret;

	chan = to_chan(pcm, substream);
	if (!chan)
		return -EINVAL;

	memset(&hw, 0, sizeof(hw));
	hw.info = SNDRV_PCM_INFO_MMAP | SNDRV_PCM_INFO_MMAP_VALID |
		  SNDRV_PCM_INFO_INTERLEAVED;
	hw.periods_min = 2;
	hw.periods_max = UINT_MAX;
	hw.period_bytes_min = 256;
	hw.period_bytes_max = dma_get_max_seg_size(dma_dev);
	hw.buffer_bytes_max = SIZE_MAX;

	ret = dma_get_slave_caps(chan, &dma_caps);
	if (ret == 0) {
		if (dma_caps.cmd_pause && dma_caps.cmd_resume)
			hw.info |= SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME;
		if (dma_caps.residue_granularity <= DMA_RESIDUE_GRANULARITY_SEGMENT)
			hw.info |= SNDRV_PCM_INFO_BATCH;

		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			addr_widths = dma_caps.dst_addr_widths;
		else
			addr_widths = dma_caps.src_addr_widths;
	}

	/*
	 * Prepare formats mask for valid/allowed sample types. If the dma does
	 * not have support for the given physical word size, it needs to be
	 * masked out so user space can not use the format which produces
	 * corrupted audio.
	 * In case the dma driver does not implement the slave_caps the default
	 * assumption is that it supports 1, 2 and 4 bytes widths.
	 */
	for (i = 0; i <= SNDRV_PCM_FORMAT_LAST; i++) {
		int bits = snd_pcm_format_physical_width(i);

		/* Enable only samples with DMA supported physical widths */
		switch (bits) {
		case 8:
		case 16:
		case 24:
		case 32:
		case 64:
			if (addr_widths & (1 << (bits / 8)))
				hw.formats |= pcm_format_to_bits(i);
			break;
		default:
			/* Unsupported types */
			break;
		}
	}

	return snd_soc_set_runtime_hwparams(substream, &hw);
}

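/*
 * PCM open: publish the hardware constraints, then allocate the runtime
 * data and cache the per-DAI DMA channels and channel maps for the
 * requested direction.
 */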
static int dmaengine_mpcm_open(struct snd_soc_component *component,
			       struct snd_pcm_substream *substream)
{
	struct dmaengine_mpcm *pcm = soc_component_to_mpcm(component);
	struct dmaengine_mpcm_runtime_data *prtd;
	int ret, i;

	ret = dmaengine_mpcm_set_runtime_hwparams(substream);
	if (ret)
		return ret;

	ret = snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0)
		return ret;

	prtd = kzalloc(sizeof(*prtd), GFP_KERNEL);
	if (!prtd)
		return -ENOMEM;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		prtd->channel_maps = pcm->mdais->playback_channel_maps;
		for (i = 0; i < pcm->mdais->num_dais; i++)
			prtd->chans[i] = pcm->tx_chans[i];
	} else {
		prtd->channel_maps = pcm->mdais->capture_channel_maps;
		for (i = 0; i < pcm->mdais->num_dais; i++)
			prtd->chans[i] = pcm->rx_chans[i];
	}

	prtd->num_chans = pcm->mdais->num_dais;
	prtd->start_flag = false;
	substream->runtime->private_data = prtd;

	return 0;
}

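/* Preallocate the DMA buffer (IRAM-backed where available) for both streams. */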
static int dmaengine_mpcm_new(struct snd_soc_component *component, struct snd_soc_pcm_runtime *rtd)
{
	struct dmaengine_mpcm *pcm = soc_component_to_mpcm(component);
	struct snd_pcm_substream *substream;
	size_t prealloc_buffer_size;
	size_t max_buffer_size;
	unsigned int i;

	prealloc_buffer_size = 512 * 1024;
	max_buffer_size = SIZE_MAX;

	for (i = SNDRV_PCM_STREAM_PLAYBACK; i <= SNDRV_PCM_STREAM_CAPTURE; i++) {
		substream = rtd->pcm->streams[i].substream;
		if (!substream)
			continue;

		snd_pcm_lib_preallocate_pages(substream,
					      SNDRV_DMA_TYPE_DEV_IRAM,
					      dmaengine_dma_dev(pcm, substream),
					      prealloc_buffer_size,
					      max_buffer_size);
	}

	return 0;
}

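/*
 * PCM pointer: derive the hardware position from the master channel's
 * residue.  For capture, hold the position at 0 until at least
 * MAX_FIFO_SIZE frames have arrived, then report the position minus that
 * FIFO depth so samples still in flight are not exposed to user space.
 */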
static snd_pcm_uframes_t dmaengine_mpcm_pointer(struct snd_soc_component *component,
						struct snd_pcm_substream *substream)
{
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct dma_tx_state state;
	snd_pcm_uframes_t frames;
	unsigned int buf_size;
	unsigned int pos = 0;
	unsigned int master = prtd->master_chan;

	buf_size = snd_pcm_lib_buffer_bytes(substream);
	dmaengine_tx_status(prtd->chans[master], prtd->cookies[master], &state);
	if (state.residue > 0 && state.residue <= buf_size)
		pos = buf_size - state.residue;

	frames = bytes_to_frames(substream->runtime, pos);
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		return frames;

#ifdef CONFIG_SND_SOC_ROCKCHIP_VAD
	if (prtd->vpos)
		frames = bytes_to_frames(substream->runtime, prtd->vpos);
#endif
	if (!prtd->start_flag && frames >= MAX_FIFO_SIZE)
		prtd->start_flag = true;

	if (prtd->start_flag) {
		if (frames >= MAX_FIFO_SIZE)
			frames -= MAX_FIFO_SIZE;
		else
			frames = runtime->buffer_size + frames - MAX_FIFO_SIZE;
	} else {
		frames = 0;
	}

	return frames;
}

static int dmaengine_mpcm_ioctl(struct snd_soc_component *component,
				struct snd_pcm_substream *substream,
				unsigned int cmd, void *arg)
{
	return snd_pcm_lib_ioctl(substream, cmd, arg);
}

static int dmaengine_mpcm_hw_free(struct snd_soc_component *component,
				  struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}

static int dmaengine_mpcm_close(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	struct dmaengine_mpcm_runtime_data *prtd = substream_to_prtd(substream);

	kfree(prtd);

	return 0;
}

static const struct snd_soc_component_driver dmaengine_mpcm_platform = {
	.name = SND_DMAENGINE_MPCM_DRV_NAME,
	.probe_order = SND_SOC_COMP_ORDER_LATE,
	.pcm_construct = dmaengine_mpcm_new,
	.open = dmaengine_mpcm_open,
	.close = dmaengine_mpcm_close,
	.ioctl = dmaengine_mpcm_ioctl,
	.hw_params = dmaengine_mpcm_hw_params,
	.hw_free = dmaengine_mpcm_hw_free,
	.trigger = snd_dmaengine_mpcm_trigger,
	.pointer = dmaengine_mpcm_pointer,
};

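/* Release every DMA channel that was successfully requested at register time. */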
static void dmaengine_mpcm_release_chan(struct dmaengine_mpcm *pcm)
{
	int i;

	for (i = 0; i < pcm->mdais->num_dais; i++) {
		if (pcm->tx_chans[i])
			dma_release_channel(pcm->tx_chans[i]);
		if (pcm->rx_chans[i])
			dma_release_channel(pcm->rx_chans[i]);
	}
}

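/**
 * snd_dmaengine_mpcm_register - register the multi-DAIS PCM component
 * @mdais: multi-DAIS device whose child DAIs provide the DMA channels
 *
 * Requests a "tx"/"rx" DMA channel from every child DAI that maps at
 * least one playback/capture channel (a missing channel is tolerated and
 * simply skipped) and registers the PCM component on the parent device.
 *
 * A minimal usage sketch, as it might appear in the multi-DAIS core's
 * probe path (the surrounding error handling is assumed, not taken from
 * this file):
 *
 *	ret = snd_dmaengine_mpcm_register(mdais);
 *	if (ret)
 *		return ret;
 *	...
 *	snd_dmaengine_mpcm_unregister(mdais->dev);	/+ on remove +/
 */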
int snd_dmaengine_mpcm_register(struct rk_mdais_dev *mdais)
{
	struct device *dev;
	struct device *child;
	struct dmaengine_mpcm *pcm;
	struct dma_chan *chan;
	unsigned int *tx_maps, *rx_maps;
	int ret, i, num;

	dev = mdais->dev;
	num = mdais->num_dais;
	tx_maps = mdais->playback_channel_maps;
	rx_maps = mdais->capture_channel_maps;
	pcm = kzalloc(sizeof(*pcm), GFP_KERNEL);
	if (!pcm)
		return -ENOMEM;

	pcm->mdais = mdais;
	for (i = 0; i < num; i++) {
		child = mdais->dais[i].dev;
		if (tx_maps[i]) {
			chan = dma_request_chan(child, "tx");
			if (IS_ERR(chan))
				chan = NULL;
			pcm->tx_chans[i] = chan;
		}

		if (rx_maps[i]) {
			chan = dma_request_chan(child, "rx");
			if (IS_ERR(chan))
				chan = NULL;
			pcm->rx_chans[i] = chan;
		}
	}

	ret = snd_soc_component_initialize(&pcm->component, &dmaengine_mpcm_platform,
					   dev);
	if (ret)
		goto err_free_dma;

	ret = snd_soc_add_component(&pcm->component, NULL, 0);
	if (ret)
		goto err_free_dma;

	return 0;

err_free_dma:
	dmaengine_mpcm_release_chan(pcm);
	kfree(pcm);
	return ret;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_mpcm_register);

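/**
 * snd_dmaengine_mpcm_unregister - tear down the multi-DAIS PCM component
 * @dev: the device snd_dmaengine_mpcm_register() registered against
 *
 * Looks up the component by driver name, unregisters it and releases the
 * DMA channels; a no-op if the component was never registered.
 */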
void snd_dmaengine_mpcm_unregister(struct device *dev)
{
	struct snd_soc_component *component;
	struct dmaengine_mpcm *pcm;

	component = snd_soc_lookup_component(dev, SND_DMAENGINE_MPCM_DRV_NAME);
	if (!component)
		return;

	pcm = soc_component_to_mpcm(component);

	snd_soc_unregister_component(dev);
	dmaengine_mpcm_release_chan(pcm);
	kfree(pcm);
}
EXPORT_SYMBOL_GPL(snd_dmaengine_mpcm_unregister);

MODULE_LICENSE("GPL");