/*
 * AMD ALSA SoC PCM Driver for ACP 2.x
 *
 * Copyright 2014-2015 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */
16 #include <linux/module.h>
17 #include <linux/delay.h>
19 #include <linux/sizes.h>
20 #include <linux/pm_runtime.h>
22 #include <sound/soc.h>
23 #include <drm/amd_asic_type.h>
/*
 * Driver name; shared by the component registration, IRQ request and
 * MODULE_ALIAS below. (Was defined twice in the original — duplicate,
 * identical definition removed.)
 */
#define DRV_NAME "acp_audio_dma"

/* Period/buffer limits advertised to the ALSA core */
#define PLAYBACK_MIN_NUM_PERIODS    2
#define PLAYBACK_MAX_NUM_PERIODS    2
#define PLAYBACK_MAX_PERIOD_SIZE    16384
#define PLAYBACK_MIN_PERIOD_SIZE    1024
#define CAPTURE_MIN_NUM_PERIODS     2
#define CAPTURE_MAX_NUM_PERIODS     2
#define CAPTURE_MAX_PERIOD_SIZE     16384
#define CAPTURE_MIN_PERIOD_SIZE     1024

#define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define MIN_BUFFER MAX_BUFFER

/* Stoney (ST) variants use smaller period/buffer limits */
#define ST_PLAYBACK_MAX_PERIOD_SIZE 8192
#define ST_CAPTURE_MAX_PERIOD_SIZE  ST_PLAYBACK_MAX_PERIOD_SIZE
#define ST_MAX_BUFFER (ST_PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define ST_MIN_BUFFER ST_MAX_BUFFER
47 static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
48 .info = SNDRV_PCM_INFO_INTERLEAVED |
49 SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
50 SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
51 SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
52 .formats = SNDRV_PCM_FMTBIT_S16_LE |
53 SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
56 .rates = SNDRV_PCM_RATE_8000_96000,
59 .buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
60 .period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
61 .period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
62 .periods_min = PLAYBACK_MIN_NUM_PERIODS,
63 .periods_max = PLAYBACK_MAX_NUM_PERIODS,
66 static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
67 .info = SNDRV_PCM_INFO_INTERLEAVED |
68 SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
69 SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
70 SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
71 .formats = SNDRV_PCM_FMTBIT_S16_LE |
72 SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
75 .rates = SNDRV_PCM_RATE_8000_48000,
78 .buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
79 .period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
80 .period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
81 .periods_min = CAPTURE_MIN_NUM_PERIODS,
82 .periods_max = CAPTURE_MAX_NUM_PERIODS,
85 static const struct snd_pcm_hardware acp_st_pcm_hardware_playback = {
86 .info = SNDRV_PCM_INFO_INTERLEAVED |
87 SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
88 SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
89 SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
90 .formats = SNDRV_PCM_FMTBIT_S16_LE |
91 SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
94 .rates = SNDRV_PCM_RATE_8000_96000,
97 .buffer_bytes_max = ST_MAX_BUFFER,
98 .period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
99 .period_bytes_max = ST_PLAYBACK_MAX_PERIOD_SIZE,
100 .periods_min = PLAYBACK_MIN_NUM_PERIODS,
101 .periods_max = PLAYBACK_MAX_NUM_PERIODS,
104 static const struct snd_pcm_hardware acp_st_pcm_hardware_capture = {
105 .info = SNDRV_PCM_INFO_INTERLEAVED |
106 SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
107 SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
108 SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
109 .formats = SNDRV_PCM_FMTBIT_S16_LE |
110 SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
113 .rates = SNDRV_PCM_RATE_8000_48000,
116 .buffer_bytes_max = ST_MAX_BUFFER,
117 .period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
118 .period_bytes_max = ST_CAPTURE_MAX_PERIOD_SIZE,
119 .periods_min = CAPTURE_MIN_NUM_PERIODS,
120 .periods_max = CAPTURE_MAX_NUM_PERIODS,
123 static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
125 return readl(acp_mmio + (reg * 4));
128 static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
130 writel(val, acp_mmio + (reg * 4));
134 * Configure a given dma channel parameters - enable/disable,
135 * number of descriptors, priority
137 static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
138 u16 dscr_strt_idx, u16 num_dscrs,
139 enum acp_dma_priority_level priority_level)
143 /* disable the channel run field */
144 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
145 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
146 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
148 /* program a DMA channel with first descriptor to be processed. */
149 acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
151 acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);
154 * program a DMA channel with the number of descriptors to be
155 * processed in the transfer
157 acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
158 acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);
160 /* set DMA channel priority */
161 acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
164 /* Initialize a dma descriptor in SRAM based on descritor information passed */
165 static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
167 acp_dma_dscr_transfer_t *descr_info)
171 sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));
173 /* program the source base address. */
174 acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
175 acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
176 /* program the destination base address. */
177 acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
178 acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
180 /* program the number of bytes to be transferred for this descriptor. */
181 acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
182 acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
186 * Initialize the DMA descriptor information for transfer between
187 * system memory <-> ACP SRAM
189 static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
190 u32 size, int direction,
191 u32 pte_offset, u16 ch,
192 u32 sram_bank, u16 dma_dscr_idx,
196 acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];
198 for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
199 dmadscr[i].xfer_val = 0;
200 if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
201 dma_dscr_idx = dma_dscr_idx + i;
202 dmadscr[i].dest = sram_bank + (i * (size / 2));
203 dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
204 + (pte_offset * SZ_4K) + (i * (size / 2));
207 dmadscr[i].xfer_val |=
208 (ACP_DMA_ATTR_DAGB_GARLIC_TO_SHAREDMEM << 16) |
212 dmadscr[i].xfer_val |=
213 (ACP_DMA_ATTR_DAGB_ONION_TO_SHAREDMEM << 16) |
217 dma_dscr_idx = dma_dscr_idx + i;
218 dmadscr[i].src = sram_bank + (i * (size / 2));
220 ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
221 (pte_offset * SZ_4K) + (i * (size / 2));
224 dmadscr[i].xfer_val |=
226 (ACP_DMA_ATTR_SHARED_MEM_TO_DAGB_GARLIC << 16) |
230 dmadscr[i].xfer_val |=
232 (ACP_DMA_ATTR_SHAREDMEM_TO_DAGB_ONION << 16) |
236 config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
239 config_acp_dma_channel(acp_mmio, ch,
241 NUM_DSCRS_PER_CHANNEL,
242 ACP_DMA_PRIORITY_LEVEL_NORMAL);
246 * Initialize the DMA descriptor information for transfer between
249 static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio, u32 size,
250 int direction, u32 sram_bank,
251 u16 destination, u16 ch,
252 u16 dma_dscr_idx, u32 asic_type)
255 acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];
257 for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
258 dmadscr[i].xfer_val = 0;
259 if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
260 dma_dscr_idx = dma_dscr_idx + i;
261 dmadscr[i].src = sram_bank + (i * (size / 2));
262 /* dmadscr[i].dest is unused by hardware. */
264 dmadscr[i].xfer_val |= BIT(22) | (destination << 16) |
267 dma_dscr_idx = dma_dscr_idx + i;
268 /* dmadscr[i].src is unused by hardware. */
271 sram_bank + (i * (size / 2));
272 dmadscr[i].xfer_val |= BIT(22) |
273 (destination << 16) | (size / 2);
275 config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
278 /* Configure the DMA channel with the above descriptore */
279 config_acp_dma_channel(acp_mmio, ch, dma_dscr_idx - 1,
280 NUM_DSCRS_PER_CHANNEL,
281 ACP_DMA_PRIORITY_LEVEL_NORMAL);
284 /* Create page table entries in ACP SRAM for the allocated memory */
285 static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
286 u16 num_of_pages, u32 pte_offset)
294 offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
295 for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
296 /* Load the low address of page int ACP SRAM through SRBM */
297 acp_reg_write((offset + (page_idx * 8)),
298 acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
299 addr = page_to_phys(pg);
301 low = lower_32_bits(addr);
302 high = upper_32_bits(addr);
304 acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
306 /* Load the High address of page int ACP SRAM through SRBM */
307 acp_reg_write((offset + (page_idx * 8) + 4),
308 acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
310 /* page enable in ACP */
312 acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
314 /* Move to next physically contiguos page */
319 static void config_acp_dma(void __iomem *acp_mmio,
320 struct audio_substream_data *rtd,
323 acp_pte_config(acp_mmio, rtd->pg, rtd->num_of_pages,
325 /* Configure System memory <-> ACP SRAM DMA descriptors */
326 set_acp_sysmem_dma_descriptors(acp_mmio, rtd->size,
327 rtd->direction, rtd->pte_offset,
328 rtd->ch1, rtd->sram_bank,
329 rtd->dma_dscr_idx_1, asic_type);
330 /* Configure ACP SRAM <-> I2S DMA descriptors */
331 set_acp_to_i2s_dma_descriptors(acp_mmio, rtd->size,
332 rtd->direction, rtd->sram_bank,
333 rtd->destination, rtd->ch2,
334 rtd->dma_dscr_idx_2, asic_type);
337 /* Start a given DMA channel transfer */
338 static void acp_dma_start(void __iomem *acp_mmio,
339 u16 ch_num, bool is_circular)
343 /* read the dma control register and disable the channel run field */
344 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
346 /* Invalidating the DAGB cache */
347 acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);
350 * configure the DMA channel and start the DMA transfer
351 * set dmachrun bit to start the transfer and enable the
352 * interrupt on completion of the dma transfer
354 dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;
357 case ACP_TO_I2S_DMA_CH_NUM:
358 case ACP_TO_SYSRAM_CH_NUM:
359 case I2S_TO_ACP_DMA_CH_NUM:
360 dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
363 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
367 /* enable for ACP SRAM to/from I2S DMA channel */
368 if (is_circular == true)
369 dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
371 dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
373 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
376 /* Stop a given DMA channel transfer */
377 static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
381 u32 count = ACP_DMA_RESET_TIME;
383 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
386 * clear the dma control register fields before writing zero
389 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
390 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
392 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
393 dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
395 if (dma_ch_sts & BIT(ch_num)) {
397 * set the reset bit for this channel to stop the dma
400 dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
401 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
404 /* check the channel status bit for some time and return the status */
406 dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
407 if (!(dma_ch_sts & BIT(ch_num))) {
409 * clear the reset flag after successfully stopping
410 * the dma transfer and break from the loop
412 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;
414 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
419 pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
427 static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
430 u32 val, req_reg, sts_reg, sts_reg_mask;
434 req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
435 sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
436 sts_reg_mask = 0xFFFFFFFF;
440 req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
441 sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
442 sts_reg_mask = 0x0000FFFF;
445 val = acp_reg_read(acp_mmio, req_reg);
446 if (val & (1 << bank)) {
447 /* bank is in off state */
448 if (power_on == true)
455 /* bank is in on state */
456 if (power_on == false)
463 acp_reg_write(val, acp_mmio, req_reg);
465 while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
467 pr_err("ACP SRAM bank %d state change failed\n", bank);
474 /* Initialize and bring ACP hardware to default state. */
475 static int acp_init(void __iomem *acp_mmio, u32 asic_type)
478 u32 val, count, sram_pte_offset;
480 /* Assert Soft reset of ACP */
481 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
483 val |= ACP_SOFT_RESET__SoftResetAud_MASK;
484 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
486 count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
488 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
489 if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
490 (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
493 pr_err("Failed to reset ACP\n");
499 /* Enable clock to ACP and wait until the clock is enabled */
500 val = acp_reg_read(acp_mmio, mmACP_CONTROL);
501 val = val | ACP_CONTROL__ClkEn_MASK;
502 acp_reg_write(val, acp_mmio, mmACP_CONTROL);
504 count = ACP_CLOCK_EN_TIME_OUT_VALUE;
507 val = acp_reg_read(acp_mmio, mmACP_STATUS);
511 pr_err("Failed to reset ACP\n");
517 /* Deassert the SOFT RESET flags */
518 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
519 val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
520 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
522 /* initiailize Onion control DAGB register */
523 acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
524 mmACP_AXI2DAGB_ONION_CNTL);
526 /* initiailize Garlic control DAGB registers */
527 acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
528 mmACP_AXI2DAGB_GARLIC_CNTL);
530 sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
531 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
532 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
533 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
534 acp_reg_write(sram_pte_offset, acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
535 acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
536 mmACP_DAGB_PAGE_SIZE_GRP_1);
538 acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
539 mmACP_DMA_DESC_BASE_ADDR);
541 /* Num of descriptiors in SRAM 0x4, means 256 descriptors;(64 * 4) */
542 acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
543 acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
544 acp_mmio, mmACP_EXTERNAL_INTR_CNTL);
547 * When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
548 * Now, turn off all of them. This can't be done in 'poweron' of
549 * ACP pm domain, as this requires ACP to be initialized.
550 * For Stoney, Memory gating is disabled,i.e SRAM Banks
551 * won't be turned off. The default state for SRAM banks is ON.
552 * Setting SRAM bank state code skipped for STONEY platform.
554 if (asic_type != CHIP_STONEY) {
555 for (bank = 1; bank < 48; bank++)
556 acp_set_sram_bank_state(acp_mmio, bank, false);
561 /* Deinitialize ACP */
562 static int acp_deinit(void __iomem *acp_mmio)
567 /* Assert Soft reset of ACP */
568 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
570 val |= ACP_SOFT_RESET__SoftResetAud_MASK;
571 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
573 count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
575 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
576 if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
577 (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
580 pr_err("Failed to reset ACP\n");
585 /* Disable ACP clock */
586 val = acp_reg_read(acp_mmio, mmACP_CONTROL);
587 val &= ~ACP_CONTROL__ClkEn_MASK;
588 acp_reg_write(val, acp_mmio, mmACP_CONTROL);
590 count = ACP_CLOCK_EN_TIME_OUT_VALUE;
593 val = acp_reg_read(acp_mmio, mmACP_STATUS);
594 if (!(val & (u32)0x1))
597 pr_err("Failed to reset ACP\n");
605 /* ACP DMA irq handler routine for playback, capture usecases */
606 static irqreturn_t dma_irq_handler(int irq, void *arg)
609 u32 intr_flag, ext_intr_status;
610 struct audio_drv_data *irq_data;
611 void __iomem *acp_mmio;
612 struct device *dev = arg;
613 bool valid_irq = false;
615 irq_data = dev_get_drvdata(dev);
616 acp_mmio = irq_data->acp_mmio;
618 ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
619 intr_flag = (((ext_intr_status &
620 ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
621 ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));
623 if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
625 if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
626 PLAYBACK_START_DMA_DESCR_CH13)
627 dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
629 dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
630 config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
632 acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);
634 snd_pcm_period_elapsed(irq_data->play_i2ssp_stream);
636 acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
637 acp_mmio, mmACP_EXTERNAL_INTR_STAT);
640 if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
642 if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
643 CAPTURE_START_DMA_DESCR_CH15)
644 dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
646 dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
647 config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
649 acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);
651 acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
652 acp_mmio, mmACP_EXTERNAL_INTR_STAT);
655 if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
657 snd_pcm_period_elapsed(irq_data->capture_i2ssp_stream);
658 acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
659 acp_mmio, mmACP_EXTERNAL_INTR_STAT);
668 static int acp_dma_open(struct snd_pcm_substream *substream)
672 struct snd_pcm_runtime *runtime = substream->runtime;
673 struct snd_soc_pcm_runtime *prtd = substream->private_data;
674 struct snd_soc_component *component = snd_soc_rtdcom_lookup(prtd,
676 struct audio_drv_data *intr_data = dev_get_drvdata(component->dev);
677 struct audio_substream_data *adata =
678 kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);
682 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
683 switch (intr_data->asic_type) {
685 runtime->hw = acp_st_pcm_hardware_playback;
688 runtime->hw = acp_pcm_hardware_playback;
691 switch (intr_data->asic_type) {
693 runtime->hw = acp_st_pcm_hardware_capture;
696 runtime->hw = acp_pcm_hardware_capture;
700 ret = snd_pcm_hw_constraint_integer(runtime,
701 SNDRV_PCM_HW_PARAM_PERIODS);
703 dev_err(component->dev, "set integer constraint failed\n");
708 adata->acp_mmio = intr_data->acp_mmio;
709 runtime->private_data = adata;
712 * Enable ACP irq, when neither playback or capture streams are
713 * active by the time when a new stream is being opened.
714 * This enablement is not required for another stream, if current
715 * stream is not closed
717 if (!intr_data->play_i2ssp_stream && !intr_data->capture_i2ssp_stream)
718 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
720 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
721 intr_data->play_i2ssp_stream = substream;
723 * For Stoney, Memory gating is disabled,i.e SRAM Banks
724 * won't be turned off. The default state for SRAM banks is ON.
725 * Setting SRAM bank state code skipped for STONEY platform.
727 if (intr_data->asic_type != CHIP_STONEY) {
728 for (bank = 1; bank <= 4; bank++)
729 acp_set_sram_bank_state(intr_data->acp_mmio,
733 intr_data->capture_i2ssp_stream = substream;
734 if (intr_data->asic_type != CHIP_STONEY) {
735 for (bank = 5; bank <= 8; bank++)
736 acp_set_sram_bank_state(intr_data->acp_mmio,
744 static int acp_dma_hw_params(struct snd_pcm_substream *substream,
745 struct snd_pcm_hw_params *params)
751 struct snd_pcm_runtime *runtime;
752 struct audio_substream_data *rtd;
753 struct snd_soc_pcm_runtime *prtd = substream->private_data;
754 struct snd_soc_component *component = snd_soc_rtdcom_lookup(prtd,
756 struct audio_drv_data *adata = dev_get_drvdata(component->dev);
758 runtime = substream->runtime;
759 rtd = runtime->private_data;
764 if (adata->asic_type == CHIP_STONEY) {
765 val = acp_reg_read(adata->acp_mmio,
766 mmACP_I2S_16BIT_RESOLUTION_EN);
767 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
768 val |= ACP_I2S_SP_16BIT_RESOLUTION_EN;
770 val |= ACP_I2S_MIC_16BIT_RESOLUTION_EN;
771 acp_reg_write(val, adata->acp_mmio,
772 mmACP_I2S_16BIT_RESOLUTION_EN);
775 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
776 switch (adata->asic_type) {
778 rtd->pte_offset = ACP_ST_PLAYBACK_PTE_OFFSET;
781 rtd->pte_offset = ACP_PLAYBACK_PTE_OFFSET;
783 rtd->ch1 = SYSRAM_TO_ACP_CH_NUM;
784 rtd->ch2 = ACP_TO_I2S_DMA_CH_NUM;
785 rtd->sram_bank = ACP_SRAM_BANK_1_ADDRESS;
786 rtd->destination = TO_ACP_I2S_1;
787 rtd->dma_dscr_idx_1 = PLAYBACK_START_DMA_DESCR_CH12;
788 rtd->dma_dscr_idx_2 = PLAYBACK_START_DMA_DESCR_CH13;
789 rtd->byte_cnt_high_reg_offset =
790 mmACP_I2S_TRANSMIT_BYTE_CNT_HIGH;
791 rtd->byte_cnt_low_reg_offset = mmACP_I2S_TRANSMIT_BYTE_CNT_LOW;
793 switch (adata->asic_type) {
795 rtd->pte_offset = ACP_ST_CAPTURE_PTE_OFFSET;
796 rtd->sram_bank = ACP_SRAM_BANK_2_ADDRESS;
799 rtd->pte_offset = ACP_CAPTURE_PTE_OFFSET;
800 rtd->sram_bank = ACP_SRAM_BANK_5_ADDRESS;
802 rtd->ch1 = ACP_TO_SYSRAM_CH_NUM;
803 rtd->ch2 = I2S_TO_ACP_DMA_CH_NUM;
804 rtd->destination = FROM_ACP_I2S_1;
805 rtd->dma_dscr_idx_1 = CAPTURE_START_DMA_DESCR_CH14;
806 rtd->dma_dscr_idx_2 = CAPTURE_START_DMA_DESCR_CH15;
807 rtd->byte_cnt_high_reg_offset =
808 mmACP_I2S_RECEIVED_BYTE_CNT_HIGH;
809 rtd->byte_cnt_low_reg_offset = mmACP_I2S_RECEIVED_BYTE_CNT_LOW;
812 size = params_buffer_bytes(params);
813 status = snd_pcm_lib_malloc_pages(substream, size);
817 memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
818 pg = virt_to_page(substream->dma_buffer.area);
821 acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
822 /* Save for runtime private data */
824 rtd->order = get_order(size);
826 /* Fill the page table entries in ACP SRAM */
829 rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
830 rtd->direction = substream->stream;
832 config_acp_dma(rtd->acp_mmio, rtd, adata->asic_type);
/* hw_free: release the pages preallocated for the DMA buffer. */
static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}
845 static u64 acp_get_byte_count(struct audio_substream_data *rtd)
847 union acp_dma_count byte_count;
849 byte_count.bcount.high = acp_reg_read(rtd->acp_mmio,
850 rtd->byte_cnt_high_reg_offset);
851 byte_count.bcount.low = acp_reg_read(rtd->acp_mmio,
852 rtd->byte_cnt_low_reg_offset);
853 return byte_count.bytescount;
856 static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
862 struct snd_pcm_runtime *runtime = substream->runtime;
863 struct audio_substream_data *rtd = runtime->private_data;
868 buffersize = frames_to_bytes(runtime, runtime->buffer_size);
869 bytescount = acp_get_byte_count(rtd);
871 if (bytescount > rtd->bytescount)
872 bytescount -= rtd->bytescount;
873 pos = do_div(bytescount, buffersize);
874 return bytes_to_frames(runtime, pos);
/* mmap: hand the DMA buffer to userspace via the default ALSA helper. */
static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}
883 static int acp_dma_prepare(struct snd_pcm_substream *substream)
885 struct snd_pcm_runtime *runtime = substream->runtime;
886 struct audio_substream_data *rtd = runtime->private_data;
891 config_acp_dma_channel(rtd->acp_mmio,
894 NUM_DSCRS_PER_CHANNEL, 0);
895 config_acp_dma_channel(rtd->acp_mmio,
898 NUM_DSCRS_PER_CHANNEL, 0);
902 static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
908 struct snd_pcm_runtime *runtime = substream->runtime;
909 struct snd_soc_pcm_runtime *prtd = substream->private_data;
910 struct audio_substream_data *rtd = runtime->private_data;
911 struct snd_soc_component *component = snd_soc_rtdcom_lookup(prtd,
917 case SNDRV_PCM_TRIGGER_START:
918 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
919 case SNDRV_PCM_TRIGGER_RESUME:
920 bytescount = acp_get_byte_count(rtd);
921 if (rtd->bytescount == 0)
922 rtd->bytescount = bytescount;
923 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
924 acp_dma_start(rtd->acp_mmio, rtd->ch1, false);
925 while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
928 dev_err(component->dev,
929 "acp dma start timeout\n");
935 acp_dma_start(rtd->acp_mmio, rtd->ch2, true);
938 case SNDRV_PCM_TRIGGER_STOP:
939 case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
940 case SNDRV_PCM_TRIGGER_SUSPEND:
941 /* For playback, non circular dma should be stopped first
942 * i.e Sysram to acp dma transfer channel(rtd->ch1) should be
943 * stopped before stopping cirular dma which is acp sram to i2s
944 * fifo dma transfer channel(rtd->ch2). Where as in Capture
945 * scenario, i2s fifo to acp sram dma channel(rtd->ch2) stopped
946 * first before stopping acp sram to sysram which is circular
949 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
950 acp_dma_stop(rtd->acp_mmio, rtd->ch1);
951 ret = acp_dma_stop(rtd->acp_mmio, rtd->ch2);
953 acp_dma_stop(rtd->acp_mmio, rtd->ch2);
954 ret = acp_dma_stop(rtd->acp_mmio, rtd->ch1);
964 static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
967 struct snd_soc_component *component = snd_soc_rtdcom_lookup(rtd,
969 struct audio_drv_data *adata = dev_get_drvdata(component->dev);
971 switch (adata->asic_type) {
973 ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
979 ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
986 dev_err(component->dev,
987 "buffer preallocation failure error:%d\n", ret);
991 static int acp_dma_close(struct snd_pcm_substream *substream)
994 struct snd_pcm_runtime *runtime = substream->runtime;
995 struct audio_substream_data *rtd = runtime->private_data;
996 struct snd_soc_pcm_runtime *prtd = substream->private_data;
997 struct snd_soc_component *component = snd_soc_rtdcom_lookup(prtd,
999 struct audio_drv_data *adata = dev_get_drvdata(component->dev);
1001 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1002 adata->play_i2ssp_stream = NULL;
1004 * For Stoney, Memory gating is disabled,i.e SRAM Banks
1005 * won't be turned off. The default state for SRAM banks is ON.
1006 * Setting SRAM bank state code skipped for STONEY platform.
1007 * added condition checks for Carrizo platform only
1009 if (adata->asic_type != CHIP_STONEY) {
1010 for (bank = 1; bank <= 4; bank++)
1011 acp_set_sram_bank_state(adata->acp_mmio, bank,
1015 adata->capture_i2ssp_stream = NULL;
1016 if (adata->asic_type != CHIP_STONEY) {
1017 for (bank = 5; bank <= 8; bank++)
1018 acp_set_sram_bank_state(adata->acp_mmio, bank,
1024 * Disable ACP irq, when the current stream is being closed and
1025 * another stream is also not active.
1027 if (!adata->play_i2ssp_stream && !adata->capture_i2ssp_stream)
1028 acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1033 static const struct snd_pcm_ops acp_dma_ops = {
1034 .open = acp_dma_open,
1035 .close = acp_dma_close,
1036 .ioctl = snd_pcm_lib_ioctl,
1037 .hw_params = acp_dma_hw_params,
1038 .hw_free = acp_dma_hw_free,
1039 .trigger = acp_dma_trigger,
1040 .pointer = acp_dma_pointer,
1041 .mmap = acp_dma_mmap,
1042 .prepare = acp_dma_prepare,
1045 static const struct snd_soc_component_driver acp_asoc_platform = {
1047 .ops = &acp_dma_ops,
1048 .pcm_new = acp_dma_new,
1051 static int acp_audio_probe(struct platform_device *pdev)
1054 struct audio_drv_data *audio_drv_data;
1055 struct resource *res;
1056 const u32 *pdata = pdev->dev.platform_data;
1059 dev_err(&pdev->dev, "Missing platform data\n");
1063 audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
1065 if (!audio_drv_data)
1068 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1069 audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
1070 if (IS_ERR(audio_drv_data->acp_mmio))
1071 return PTR_ERR(audio_drv_data->acp_mmio);
1074 * The following members gets populated in device 'open'
1075 * function. Till then interrupts are disabled in 'acp_init'
1076 * and device doesn't generate any interrupts.
1079 audio_drv_data->play_i2ssp_stream = NULL;
1080 audio_drv_data->capture_i2ssp_stream = NULL;
1082 audio_drv_data->asic_type = *pdata;
1084 res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1086 dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
1090 status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
1091 0, "ACP_IRQ", &pdev->dev);
1093 dev_err(&pdev->dev, "ACP IRQ request failed\n");
1097 dev_set_drvdata(&pdev->dev, audio_drv_data);
1099 /* Initialize the ACP */
1100 status = acp_init(audio_drv_data->acp_mmio, audio_drv_data->asic_type);
1102 dev_err(&pdev->dev, "ACP Init failed status:%d\n", status);
1106 status = devm_snd_soc_register_component(&pdev->dev,
1107 &acp_asoc_platform, NULL, 0);
1109 dev_err(&pdev->dev, "Fail to register ALSA platform device\n");
1113 pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
1114 pm_runtime_use_autosuspend(&pdev->dev);
1115 pm_runtime_enable(&pdev->dev);
1120 static int acp_audio_remove(struct platform_device *pdev)
1123 struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);
1125 status = acp_deinit(adata->acp_mmio);
1127 dev_err(&pdev->dev, "ACP Deinit failed status:%d\n", status);
1128 pm_runtime_disable(&pdev->dev);
1133 static int acp_pcm_resume(struct device *dev)
1137 struct audio_drv_data *adata = dev_get_drvdata(dev);
1139 status = acp_init(adata->acp_mmio, adata->asic_type);
1141 dev_err(dev, "ACP Init failed status:%d\n", status);
1145 if (adata->play_i2ssp_stream && adata->play_i2ssp_stream->runtime) {
1147 * For Stoney, Memory gating is disabled,i.e SRAM Banks
1148 * won't be turned off. The default state for SRAM banks is ON.
1149 * Setting SRAM bank state code skipped for STONEY platform.
1151 if (adata->asic_type != CHIP_STONEY) {
1152 for (bank = 1; bank <= 4; bank++)
1153 acp_set_sram_bank_state(adata->acp_mmio, bank,
1156 config_acp_dma(adata->acp_mmio,
1157 adata->play_i2ssp_stream->runtime->private_data,
1160 if (adata->capture_i2ssp_stream &&
1161 adata->capture_i2ssp_stream->runtime) {
1162 if (adata->asic_type != CHIP_STONEY) {
1163 for (bank = 5; bank <= 8; bank++)
1164 acp_set_sram_bank_state(adata->acp_mmio, bank,
1167 config_acp_dma(adata->acp_mmio,
1168 adata->capture_i2ssp_stream->runtime->private_data,
1171 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1175 static int acp_pcm_runtime_suspend(struct device *dev)
1178 struct audio_drv_data *adata = dev_get_drvdata(dev);
1180 status = acp_deinit(adata->acp_mmio);
1182 dev_err(dev, "ACP Deinit failed status:%d\n", status);
1183 acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1187 static int acp_pcm_runtime_resume(struct device *dev)
1190 struct audio_drv_data *adata = dev_get_drvdata(dev);
1192 status = acp_init(adata->acp_mmio, adata->asic_type);
1194 dev_err(dev, "ACP Init failed status:%d\n", status);
1197 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1201 static const struct dev_pm_ops acp_pm_ops = {
1202 .resume = acp_pcm_resume,
1203 .runtime_suspend = acp_pcm_runtime_suspend,
1204 .runtime_resume = acp_pcm_runtime_resume,
1207 static struct platform_driver acp_dma_driver = {
1208 .probe = acp_audio_probe,
1209 .remove = acp_audio_remove,
1216 module_platform_driver(acp_dma_driver);
1218 MODULE_AUTHOR("Vijendar.Mukunda@amd.com");
1219 MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
1220 MODULE_DESCRIPTION("AMD ACP PCM Driver");
1221 MODULE_LICENSE("GPL v2");
1222 MODULE_ALIAS("platform:"DRV_NAME);