ASoC: AMD: add AMD ASoC ACP 2.x DMA driver
sound/soc/amd/acp-pcm-dma.c
/*
 * AMD ALSA SoC PCM Driver for ACP 2.x
 *
 * Copyright 2014-2015 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */

#include <linux/module.h>
#include <linux/delay.h>
#include <linux/sizes.h>

#include <sound/soc.h>

#include "acp.h"

#define PLAYBACK_MIN_NUM_PERIODS    2
#define PLAYBACK_MAX_NUM_PERIODS    2
#define PLAYBACK_MAX_PERIOD_SIZE    16384
#define PLAYBACK_MIN_PERIOD_SIZE    1024
#define CAPTURE_MIN_NUM_PERIODS     2
#define CAPTURE_MAX_NUM_PERIODS     2
#define CAPTURE_MAX_PERIOD_SIZE     16384
#define CAPTURE_MIN_PERIOD_SIZE     1024

#define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define MIN_BUFFER MAX_BUFFER

static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};

struct audio_drv_data {
	struct snd_pcm_substream *play_stream;
	struct snd_pcm_substream *capture_stream;
	void __iomem *acp_mmio;
};

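/*
 * ACP registers are addressed by dword index (the mmACP_* macros from
 * acp.h), so the accessors below scale the register index by 4 to get the
 * byte offset into the MMIO region.
 */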
static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
{
	return readl(acp_mmio + (reg * 4));
}

static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
{
	writel(val, acp_mmio + (reg * 4));
}

/* Configure a given DMA channel's parameters - enable/disable,
 * number of descriptors, priority
 */
static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
				   u16 dscr_strt_idx, u16 num_dscrs,
				   enum acp_dma_priority_level priority_level)
{
	u32 dma_ctrl;

	/* disable the channel run field */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* program the DMA channel with the first descriptor to be processed. */
	acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
			& dscr_strt_idx),
			acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);

	/* program the DMA channel with the number of descriptors to be
	 * processed in the transfer
	 */
	acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
		      acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);

	/* set the DMA channel priority */
	acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
}

/* Initialize a DMA descriptor in SRAM based on the descriptor information passed */
static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
					  u16 descr_idx,
					  acp_dma_dscr_transfer_t *descr_info)
{
	u32 sram_offset;

	sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));

	/* program the source base address. */
	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
	/* program the destination base address. */
	acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

	/* program the number of bytes to be transferred for this descriptor. */
	acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
}

/* Initialize the DMA descriptor information for transfer between
 * system memory <-> ACP SRAM
 */
static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 pte_offset)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS +
					  (size / 2) - (i * (size / 2));
			dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
					 + (pte_offset * SZ_4K) + (i * (size / 2));
			dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM << 16) |
				(size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
					 (i * (size / 2));
			dmadscr[i].dest = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
					  + (pte_offset * SZ_4K) +
					  (i * (size / 2));
			dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
				(size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

/* Initialize the DMA descriptor information for transfer between
 * ACP SRAM <-> I2S
 */
static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
					 (i * (size / 2));
			/* dmadscr[i].dest is unused by hardware. */
			dmadscr[i].dest = 0;
			dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
					       (size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
			/* dmadscr[i].src is unused by hardware. */
			dmadscr[i].src = 0;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_5_ADDRESS +
					  (i * (size / 2));
			dmadscr[i].xfer_val |= BIT(22) |
					       (FROM_ACP_I2S_1 << 16) | (size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	/* Configure the DMA channel with the above descriptors */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

/* Create page table entries in ACP SRAM for the allocated memory */
static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
			   u16 num_of_pages, u32 pte_offset)
{
	u16 page_idx;
	u64 addr;
	u32 low;
	u32 high;
	u32 offset;

	offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
	for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
		/* Load the low address of the page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8)),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
		addr = page_to_phys(pg);

		low = lower_32_bits(addr);
		high = upper_32_bits(addr);

		acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Load the high address of the page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8) + 4),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);

		/* page enable in ACP */
		high |= BIT(31);
		acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Move to the next physically contiguous page */
		pg++;
	}
}

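/*
 * Program the whole DMA path for a stream: map the ALSA buffer pages into
 * ACP SRAM (PTEs), then set up the two cascaded channels used per direction -
 * system memory <-> ACP SRAM (SYSRAM_TO_ACP_CH_NUM / ACP_TO_SYSRAM_CH_NUM)
 * and ACP SRAM <-> I2S (ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM).
 */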
static void config_acp_dma(void __iomem *acp_mmio,
			   struct audio_substream_data *audio_config)
{
	u32 pte_offset;

	if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
		pte_offset = ACP_PLAYBACK_PTE_OFFSET;
	else
		pte_offset = ACP_CAPTURE_PTE_OFFSET;

	acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
		       pte_offset);

	/* Configure system memory <-> ACP SRAM DMA descriptors */
	set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction, pte_offset);

	/* Configure ACP SRAM <-> I2S DMA descriptors */
	set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction);
}

/* Start a given DMA channel transfer */
static void acp_dma_start(void __iomem *acp_mmio,
			  u16 ch_num, bool is_circular)
{
	u32 dma_ctrl;

	/* read the dma control register for this channel */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* Invalidate the DAGB cache */
	acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);

	/* configure the DMA channel and start the DMA transfer:
	 * set the dmachrun bit to start the transfer and enable the
	 * interrupt on completion of the dma transfer
	 */
	dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;

	switch (ch_num) {
	case ACP_TO_I2S_DMA_CH_NUM:
	case ACP_TO_SYSRAM_CH_NUM:
	case I2S_TO_ACP_DMA_CH_NUM:
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	default:
		dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	}

	/* circular mode is used for the ACP SRAM to/from I2S DMA channels */
	if (is_circular)
		dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
	else
		dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
}

/* Stop a given DMA channel transfer */
static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
{
	u32 dma_ctrl;
	u32 dma_ch_sts;
	u32 count = ACP_DMA_RESET_TIME;

	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* clear the dma control register fields before writing zero
	 * in the reset bit
	 */
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);

	if (dma_ch_sts & BIT(ch_num)) {
		/* set the reset bit for this channel to stop the dma
		 * transfer
		 */
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
		acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	}

	/* poll the channel status bit for some time and return the status */
	while (true) {
		dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
		if (!(dma_ch_sts & BIT(ch_num))) {
			/* clear the reset flag after successfully stopping
			 * the dma transfer and break from the loop
			 */
			dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;

			acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
				      + ch_num);
			break;
		}
		if (--count == 0) {
			pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}

/* Initialize and bring the ACP hardware to its default state. */
static int acp_init(void __iomem *acp_mmio)
{
	u32 val, count, sram_pte_offset;

	/* Assert soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Enable the clock to ACP and wait until the clock is enabled */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val = val | ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (val & (u32) 0x1)
			break;
		if (--count == 0) {
			pr_err("Failed to enable ACP clock\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Deassert the SOFT RESET flags */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
	val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	/* initialize the Onion control DAGB register */
	acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_ONION_CNTL);

	/* initialize the Garlic control DAGB register */
	acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_GARLIC_CNTL);

	sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
	acp_reg_write(sram_pte_offset, acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
	acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
		      mmACP_DAGB_PAGE_SIZE_GRP_1);

	acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
		      mmACP_DMA_DESC_BASE_ADDR);

	/* Number of descriptors in SRAM: 0x4 means 256 descriptors (64 * 4) */
	acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
	acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
		      acp_mmio, mmACP_EXTERNAL_INTR_CNTL);

	return 0;
}

/* Deinitialize ACP */
static int acp_deinit(void __iomem *acp_mmio)
{
	u32 val;
	u32 count;

	/* Assert soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	/* Disable the ACP clock */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val &= ~ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (!(val & (u32) 0x1))
			break;
		if (--count == 0) {
			pr_err("Failed to disable ACP clock\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}

/* ACP DMA irq handler routine for playback and capture use cases */
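/*
 * Each interrupt-on-complete from an ACP SRAM <-> I2S channel re-queues the
 * matching system memory <-> ACP SRAM channel with the single descriptor
 * covering the buffer half that was just played out or captured. An elapsed
 * period is reported directly for playback, and for capture once the
 * SRAM-to-system-memory copy itself completes. This keeps the ping-pong
 * transfer described in acp_dma_prepare() running for the life of the stream.
 */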
static irqreturn_t dma_irq_handler(int irq, void *arg)
{
	u16 dscr_idx;
	u32 intr_flag, ext_intr_status;
	struct audio_drv_data *irq_data;
	void __iomem *acp_mmio;
	struct device *dev = arg;
	bool valid_irq = false;

	irq_data = dev_get_drvdata(dev);
	acp_mmio = irq_data->acp_mmio;

	ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	intr_flag = (((ext_intr_status &
		       ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
		      ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));

	if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
		    PLAYBACK_START_DMA_DESCR_CH13)
			dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
		else
			dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		snd_pcm_period_elapsed(irq_data->play_stream);

		acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
		    CAPTURE_START_DMA_DESCR_CH15)
			dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
		else
			dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);

		acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
		valid_irq = true;
		snd_pcm_period_elapsed(irq_data->capture_stream);
		acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if (valid_irq)
		return IRQ_HANDLED;
	else
		return IRQ_NONE;
}

static int acp_dma_open(struct snd_pcm_substream *substream)
{
	int ret = 0;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);

	struct audio_substream_data *adata =
		kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);
	if (adata == NULL)
		return -ENOMEM;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		runtime->hw = acp_pcm_hardware_playback;
	else
		runtime->hw = acp_pcm_hardware_capture;

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(prtd->platform->dev, "set integer constraint failed\n");
		kfree(adata);
		return ret;
	}

	adata->acp_mmio = intr_data->acp_mmio;
	runtime->private_data = adata;

	/* Enable the ACP irq only when neither playback nor capture stream
	 * is active by the time a new stream is opened. The enable is not
	 * repeated for a second stream while the current one is still open.
	 */
	if (!intr_data->play_stream && !intr_data->capture_stream)
		acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		intr_data->play_stream = substream;
	else
		intr_data->capture_stream = substream;

	return 0;
}

static int acp_dma_hw_params(struct snd_pcm_substream *substream,
			     struct snd_pcm_hw_params *params)
{
	int status;
	uint64_t size;
	struct snd_dma_buffer *dma_buffer;
	struct page *pg;
	struct snd_pcm_runtime *runtime;
	struct audio_substream_data *rtd;

	dma_buffer = &substream->dma_buffer;

	runtime = substream->runtime;
	rtd = runtime->private_data;

	if (WARN_ON(!rtd))
		return -EINVAL;

	size = params_buffer_bytes(params);
	status = snd_pcm_lib_malloc_pages(substream, size);
	if (status < 0)
		return status;

	memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
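	/*
	 * The buffer comes from the SNDRV_DMA_TYPE_DEV preallocation done in
	 * acp_dma_new(), so it is physically contiguous; walking successive
	 * struct pages from the first one in acp_pte_config() is therefore
	 * enough to build the ACP page table.
	 */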
	pg = virt_to_page(substream->dma_buffer.area);

	if (pg != NULL) {
		/* Save the page pointer and stream parameters in the runtime
		 * private data, then fill the page table entries in ACP SRAM.
		 */
		rtd->pg = pg;
		rtd->order = get_order(size);
		rtd->size = size;
		rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
		rtd->direction = substream->stream;

		config_acp_dma(rtd->acp_mmio, rtd);
		status = 0;
	} else {
		status = -ENOMEM;
	}
	return status;
}

static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}

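/*
 * Derive the current buffer position from the DMA engine's current
 * descriptor register: each descriptor covers one period (half of the
 * two-period buffer), so the descriptor index tells which half is being
 * worked on.
 */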
static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
{
	u16 dscr;
	u32 mul, dma_config, period_bytes;
	u32 pos = 0;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	period_bytes = frames_to_bytes(runtime, runtime->period_size);
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		dscr = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CUR_DSCR_13);

		if (dscr == PLAYBACK_START_DMA_DESCR_CH13)
			mul = 0;
		else
			mul = 1;
		pos = (mul * period_bytes);
	} else {
		dma_config = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CNTL_14);
		if (dma_config != 0) {
			dscr = acp_reg_read(rtd->acp_mmio,
					    mmACP_DMA_CUR_DSCR_14);
			if (dscr == CAPTURE_START_DMA_DESCR_CH14)
				mul = 1;
			else
				mul = 2;
			pos = (mul * period_bytes);
		}

		if (pos >= (2 * period_bytes))
			pos = 0;
	}
	return bytes_to_frames(runtime, pos);
}

static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}

static int acp_dma_prepare(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL, 0);
		/* Fill ACP SRAM (2 periods) with zeros from system RAM,
		 * which was zeroed in hw_params
		 */
		acp_dma_start(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		/* ACP SRAM (2 periods of buffer size) is initially filled with
		 * zeros. Before rendering starts, the 2nd half of SRAM will be
		 * filled with valid audio data DMA'ed from the first half of
		 * system RAM while the 1st half of SRAM still holds zeros.
		 * This is the initial scenario when rendering starts from
		 * SRAM. Later on, the 2nd half of system memory is DMA'ed to
		 * the 1st half of SRAM and the 1st half of system memory to
		 * the 2nd half of SRAM, in a ping-pong fashion, until
		 * rendering stops.
		 */
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       1, 0);
	} else {
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL, 0);
	}
	return 0;
}

static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	int ret;
	u32 loops = 1000;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_substream_data *rtd = runtime->private_data;

	if (!rtd)
		return -EINVAL;
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_RESUME:
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			acp_dma_start(rtd->acp_mmio,
				      SYSRAM_TO_ACP_CH_NUM, false);
			while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
			       BIT(SYSRAM_TO_ACP_CH_NUM)) {
				if (!loops--) {
					dev_err(prtd->platform->dev,
						"acp dma start timeout\n");
					return -ETIMEDOUT;
				}
				cpu_relax();
			}

			acp_dma_start(rtd->acp_mmio,
				      ACP_TO_I2S_DMA_CH_NUM, true);

		} else {
			acp_dma_start(rtd->acp_mmio,
				      I2S_TO_ACP_DMA_CH_NUM, true);
		}
		ret = 0;
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_SUSPEND:
		/* Only the circular DMA channels need to be stopped:
		 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. The
		 * non-circular channels (SYSRAM_TO_ACP_CH_NUM /
		 * ACP_TO_SYSRAM_CH_NUM) stop automatically once their
		 * transfer completes.
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			ret = acp_dma_stop(rtd->acp_mmio,
					   ACP_TO_I2S_DMA_CH_NUM);
		else
			ret = acp_dma_stop(rtd->acp_mmio,
					   I2S_TO_ACP_DMA_CH_NUM);
		break;
	default:
		ret = -EINVAL;
		break;
	}
	return ret;
}

static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
{
	return snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
						     SNDRV_DMA_TYPE_DEV,
						     NULL, MIN_BUFFER,
						     MAX_BUFFER);
}

static int acp_dma_close(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	kfree(rtd);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		adata->play_stream = NULL;
	else
		adata->capture_stream = NULL;

	/* Disable the ACP irq when the current stream is being closed and
	 * no other stream is active.
	 */
	if (!adata->play_stream && !adata->capture_stream)
		acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	return 0;
}

static struct snd_pcm_ops acp_dma_ops = {
	.open = acp_dma_open,
	.close = acp_dma_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = acp_dma_hw_params,
	.hw_free = acp_dma_hw_free,
	.trigger = acp_dma_trigger,
	.pointer = acp_dma_pointer,
	.mmap = acp_dma_mmap,
	.prepare = acp_dma_prepare,
};

static struct snd_soc_platform_driver acp_asoc_platform = {
	.ops = &acp_dma_ops,
	.pcm_new = acp_dma_new,
};

static int acp_audio_probe(struct platform_device *pdev)
{
	int status;
	struct audio_drv_data *audio_drv_data;
	struct resource *res;

	audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
				      GFP_KERNEL);
	if (audio_drv_data == NULL)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(audio_drv_data->acp_mmio))
		return PTR_ERR(audio_drv_data->acp_mmio);

	/* These members get populated in the device 'open' function.
	 * Until then interrupts are disabled in 'acp_init' and the device
	 * doesn't generate any interrupts.
	 */
	audio_drv_data->play_stream = NULL;
	audio_drv_data->capture_stream = NULL;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
		return -ENODEV;
	}

	status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
				  0, "ACP_IRQ", &pdev->dev);
	if (status) {
		dev_err(&pdev->dev, "ACP IRQ request failed\n");
		return status;
	}

	dev_set_drvdata(&pdev->dev, audio_drv_data);

	/* Initialize the ACP */
	status = acp_init(audio_drv_data->acp_mmio);
	if (status)
		return status;

	status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
	if (status != 0) {
		dev_err(&pdev->dev, "Failed to register the ALSA platform device\n");
		return status;
	}

	return status;
}

static int acp_audio_remove(struct platform_device *pdev)
{
	struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);

	acp_deinit(adata->acp_mmio);
	snd_soc_unregister_platform(&pdev->dev);

	return 0;
}

static struct platform_driver acp_dma_driver = {
	.probe = acp_audio_probe,
	.remove = acp_audio_remove,
	.driver = {
		.name = "acp_audio_dma",
	},
};

module_platform_driver(acp_dma_driver);

MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
MODULE_DESCRIPTION("AMD ACP PCM Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:acp-dma-audio");