1 // SPDX-License-Identifier: GPL-2.0+
3 * (C) Copyright 2013 - 2022, Xilinx, Inc.
4 * (C) Copyright 2022, Advanced Micro Devices, Inc.
6 * Xilinx Zynq SD Host Controller Interface
13 #include <linux/delay.h>
14 #include "mmc_private.h"
17 #include <asm/arch/sys_proto.h>
18 #include <dm/device_compat.h>
19 #include <linux/err.h>
20 #include <linux/libfdt.h>
21 #include <linux/iopoll.h>
22 #include <asm/types.h>
23 #include <linux/math64.h>
24 #include <asm/cache.h>
27 #include <zynqmp_firmware.h>
/* Host-controller-mapped Arasan tap-delay registers (ZynqMP/Versal) */
#define SDHCI_ARASAN_ITAPDLY_REGISTER	0xF0F8
#define SDHCI_ARASAN_ITAPDLY_SEL_MASK	GENMASK(7, 0)
#define SDHCI_ARASAN_OTAPDLY_REGISTER	0xF0FC
#define SDHCI_ARASAN_OTAPDLY_SEL_MASK	GENMASK(5, 0)
#define SDHCI_ITAPDLY_CHGWIN		BIT(9)
#define SDHCI_ITAPDLY_ENABLE		BIT(8)
#define SDHCI_OTAPDLY_ENABLE		BIT(6)

#define SDHCI_TUNING_LOOP_COUNT		40
/* NOTE(review): restored from upstream; used by arasan_dt_parse_clk_phases() */
#define MMC_BANK2			0x2

/* ZynqMP IOU_SLCR tap-delay/DLL registers, written directly at EL3/SPL */
#define SD_DLL_CTRL			0xFF180358
#define SD_ITAP_DLY			0xFF180314
#define SD_OTAP_DLY			0xFF180318
#define SD0_DLL_RST			BIT(2)
#define SD1_DLL_RST			BIT(18)
#define SD0_ITAPCHGWIN			BIT(9)
#define SD1_ITAPCHGWIN			BIT(25)
#define SD0_ITAPDLYENA			BIT(8)
#define SD1_ITAPDLYENA			BIT(24)
#define SD0_ITAPDLYSEL_MASK		GENMASK(7, 0)
#define SD1_ITAPDLYSEL_MASK		GENMASK(23, 16)
#define SD0_OTAPDLYSEL_MASK		GENMASK(5, 0)
#define SD1_OTAPDLYSEL_MASK		GENMASK(21, 16)

/* Below this clock the Versal NET eMMC PHY uses the delay chain, not the DLL */
#define MIN_PHY_CLK_HZ			50000000

/* Versal NET eMMC PHY control register 1 fields */
#define PHY_CTRL_REG1			0x270
#define PHY_CTRL_ITAPDLY_ENA_MASK	BIT(0)
#define PHY_CTRL_ITAPDLY_SEL_MASK	GENMASK(5, 1)
#define PHY_CTRL_ITAPDLY_SEL_SHIFT	1
#define PHY_CTRL_ITAP_CHG_WIN_MASK	BIT(6)
#define PHY_CTRL_OTAPDLY_ENA_MASK	BIT(8)
#define PHY_CTRL_OTAPDLY_SEL_MASK	GENMASK(15, 12)
#define PHY_CTRL_OTAPDLY_SEL_SHIFT	12
#define PHY_CTRL_STRB_SEL_MASK		GENMASK(23, 16)
#define PHY_CTRL_STRB_SEL_SHIFT		16
#define PHY_CTRL_TEST_CTRL_MASK		GENMASK(31, 24)

/* Versal NET eMMC PHY control register 2 fields */
#define PHY_CTRL_REG2			0x274
#define PHY_CTRL_EN_DLL_MASK		BIT(0)
#define PHY_CTRL_DLL_RDY_MASK		BIT(1)
#define PHY_CTRL_FREQ_SEL_MASK		GENMASK(6, 4)
#define PHY_CTRL_FREQ_SEL_SHIFT		4
#define PHY_CTRL_SEL_DLY_TX_MASK	BIT(16)
#define PHY_CTRL_SEL_DLY_RX_MASK	BIT(17)
/* DLL frequency-band select encodings for PHY_CTRL_FREQ_SEL */
#define FREQSEL_200M_170M		0x0
#define FREQSEL_170M_140M		0x1
#define FREQSEL_140M_110M		0x2
#define FREQSEL_110M_80M		0x3
#define FREQSEL_80M_50M			0x4
#define FREQSEL_275M_250M		0x5
#define FREQSEL_250M_225M		0x6
#define FREQSEL_225M_200M		0x7
#define PHY_DLL_TIMEOUT_MS		100

/* Versal NET eMMC DDR52 input-clock phase: delay-chain vs DLL mode values */
#define VERSAL_NET_EMMC_ICLK_PHASE_DDR52_DLY_CHAIN	39
#define VERSAL_NET_EMMC_ICLK_PHASE_DDR52_DLL		146
#define VERSAL_NET_PHY_CTRL_STRB90_STRB180_VAL		0X77
89 struct arasan_sdhci_clk_data {
90 int clk_phase_in[MMC_TIMING_MMC_HS400 + 1];
91 int clk_phase_out[MMC_TIMING_MMC_HS400 + 1];
94 struct arasan_sdhci_plat {
95 struct mmc_config cfg;
99 struct arasan_sdhci_priv {
100 struct sdhci_host *host;
101 struct arasan_sdhci_clk_data clk_data;
105 bool internal_phy_reg;
106 struct reset_ctl_bulk resets;
109 /* For Versal platforms zynqmp_mmio_write() won't be available */
110 __weak int zynqmp_mmio_write(const u32 address, const u32 mask, const u32 value)
115 __weak int xilinx_pm_request(u32 api_id, u32 arg0, u32 arg1, u32 arg2,
116 u32 arg3, u32 *ret_payload)
121 __weak int zynqmp_pm_is_function_supported(const u32 api_id, const u32 id)
126 #if defined(CONFIG_ARCH_ZYNQMP) || defined(CONFIG_ARCH_VERSAL) || defined(CONFIG_ARCH_VERSAL_NET)
127 /* Default settings for ZynqMP Clock Phases */
128 static const u32 zynqmp_iclk_phases[] = {0, 63, 63, 0, 63, 0,
130 static const u32 zynqmp_oclk_phases[] = {0, 72, 60, 0, 60, 72,
131 135, 48, 72, 135, 0};
133 /* Default settings for Versal Clock Phases */
134 static const u32 versal_iclk_phases[] = {0, 132, 132, 0, 132,
135 0, 0, 162, 90, 0, 0};
136 static const u32 versal_oclk_phases[] = {0, 60, 48, 0, 48, 72,
139 /* Default settings for versal-net eMMC Clock Phases */
140 static const u32 versal_net_emmc_iclk_phases[] = {0, 0, 0, 0, 0, 0, 0, 0, 39,
142 static const u32 versal_net_emmc_oclk_phases[] = {0, 113, 0, 0, 0, 0, 0, 0,
145 static const u8 mode2timing[] = {
146 [MMC_LEGACY] = MMC_TIMING_LEGACY,
147 [MMC_HS] = MMC_TIMING_MMC_HS,
148 [SD_HS] = MMC_TIMING_SD_HS,
149 [MMC_HS_52] = MMC_TIMING_MMC_HS,
150 [MMC_DDR_52] = MMC_TIMING_MMC_DDR52,
151 [UHS_SDR12] = MMC_TIMING_UHS_SDR12,
152 [UHS_SDR25] = MMC_TIMING_UHS_SDR25,
153 [UHS_SDR50] = MMC_TIMING_UHS_SDR50,
154 [UHS_DDR50] = MMC_TIMING_UHS_DDR50,
155 [UHS_SDR104] = MMC_TIMING_UHS_SDR104,
156 [MMC_HS_200] = MMC_TIMING_MMC_HS200,
157 [MMC_HS_400] = MMC_TIMING_MMC_HS400,
160 #if defined(CONFIG_ARCH_VERSAL_NET)
162 * arasan_phy_set_delaychain - Set eMMC delay chain based Input/Output clock
164 * @host: Pointer to the sdhci_host structure
165 * @enable: Enable or disable Delay chain based Tx and Rx clock
168 * Enable or disable eMMC delay chain based Input and Output clock in
171 static void arasan_phy_set_delaychain(struct sdhci_host *host, bool enable)
175 reg = sdhci_readw(host, PHY_CTRL_REG2);
177 reg |= PHY_CTRL_SEL_DLY_TX_MASK | PHY_CTRL_SEL_DLY_RX_MASK;
179 reg &= ~(PHY_CTRL_SEL_DLY_TX_MASK | PHY_CTRL_SEL_DLY_RX_MASK);
181 sdhci_writew(host, reg, PHY_CTRL_REG2);
185 * arasan_phy_set_dll - Set eMMC DLL clock
187 * @host: Pointer to the sdhci_host structure
188 * @enable: Enable or disable DLL clock
189 * Return: 0 if success or timeout error
191 * Enable or disable eMMC DLL clock in PHY_CTRL_REG2. When DLL enable is
192 * set, wait till DLL is locked
194 static int arasan_phy_set_dll(struct sdhci_host *host, bool enable)
198 reg = sdhci_readw(host, PHY_CTRL_REG2);
200 reg |= PHY_CTRL_EN_DLL_MASK;
202 reg &= ~PHY_CTRL_EN_DLL_MASK;
204 sdhci_writew(host, reg, PHY_CTRL_REG2);
206 /* If DLL is disabled return success */
210 /* If DLL is enabled wait till DLL loop is locked, which is
211 * indicated by dll_rdy bit(bit1) in PHY_CTRL_REG2
213 return readl_relaxed_poll_timeout(host->ioaddr + PHY_CTRL_REG2, reg,
214 (reg & PHY_CTRL_DLL_RDY_MASK),
215 1000 * PHY_DLL_TIMEOUT_MS);
219 * arasan_phy_dll_set_freq - Select frequency range of DLL for eMMC
221 * @host: Pointer to the sdhci_host structure
222 * @clock: clock value
225 * Set frequency range bits based on the selected clock for eMMC
227 static void arasan_phy_dll_set_freq(struct sdhci_host *host, int clock)
229 u32 reg, freq_sel, freq;
231 freq = DIV_ROUND_CLOSEST(clock, 1000000);
232 if (freq <= 200 && freq > 170)
233 freq_sel = FREQSEL_200M_170M;
234 else if (freq <= 170 && freq > 140)
235 freq_sel = FREQSEL_170M_140M;
236 else if (freq <= 140 && freq > 110)
237 freq_sel = FREQSEL_140M_110M;
238 else if (freq <= 110 && freq > 80)
239 freq_sel = FREQSEL_110M_80M;
241 freq_sel = FREQSEL_80M_50M;
243 reg = sdhci_readw(host, PHY_CTRL_REG2);
244 reg &= ~PHY_CTRL_FREQ_SEL_MASK;
245 reg |= (freq_sel << PHY_CTRL_FREQ_SEL_SHIFT);
246 sdhci_writew(host, reg, PHY_CTRL_REG2);
249 static int arasan_sdhci_config_dll(struct sdhci_host *host, unsigned int clock, bool enable)
251 struct mmc *mmc = (struct mmc *)host->mmc;
252 struct arasan_sdhci_priv *priv = dev_get_priv(mmc->dev);
255 if (priv->internal_phy_reg && clock >= MIN_PHY_CLK_HZ && enable)
256 arasan_phy_set_dll(host, 1);
260 if (priv->internal_phy_reg && clock >= MIN_PHY_CLK_HZ) {
261 sdhci_writew(host, 0, SDHCI_CLOCK_CONTROL);
262 arasan_phy_set_dll(host, 0);
263 arasan_phy_set_delaychain(host, 0);
264 arasan_phy_dll_set_freq(host, clock);
268 sdhci_writew(host, 0, SDHCI_CLOCK_CONTROL);
269 arasan_phy_set_delaychain(host, 1);
275 static inline int arasan_zynqmp_set_in_tapdelay(u32 node_id, u32 itap_delay)
279 if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
280 if (node_id == NODE_SD_0) {
281 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD0_ITAPCHGWIN,
286 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD0_ITAPDLYENA,
291 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD0_ITAPDLYSEL_MASK,
296 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD0_ITAPCHGWIN, 0);
300 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD1_ITAPCHGWIN,
305 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD1_ITAPDLYENA,
310 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD1_ITAPDLYSEL_MASK,
315 ret = zynqmp_mmio_write(SD_ITAP_DLY, SD1_ITAPCHGWIN, 0);
319 return xilinx_pm_request(PM_IOCTL, node_id,
320 IOCTL_SET_SD_TAPDELAY,
321 PM_TAPDELAY_INPUT, itap_delay, NULL);
327 static inline int arasan_zynqmp_set_out_tapdelay(u32 node_id, u32 otap_delay)
329 if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
330 if (node_id == NODE_SD_0)
331 return zynqmp_mmio_write(SD_OTAP_DLY,
335 return zynqmp_mmio_write(SD_OTAP_DLY, SD1_OTAPDLYSEL_MASK,
338 return xilinx_pm_request(PM_IOCTL, node_id,
339 IOCTL_SET_SD_TAPDELAY,
340 PM_TAPDELAY_OUTPUT, otap_delay, NULL);
344 static inline int zynqmp_dll_reset(u32 node_id, u32 type)
346 if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
347 if (node_id == NODE_SD_0)
348 return zynqmp_mmio_write(SD_DLL_CTRL, SD0_DLL_RST,
349 type == PM_DLL_RESET_ASSERT ?
352 return zynqmp_mmio_write(SD_DLL_CTRL, SD1_DLL_RST,
353 type == PM_DLL_RESET_ASSERT ?
356 return xilinx_pm_request(PM_IOCTL, node_id,
357 IOCTL_SD_DLL_RESET, type, 0, NULL);
361 static int arasan_zynqmp_dll_reset(struct sdhci_host *host, u32 node_id)
363 struct mmc *mmc = (struct mmc *)host->mmc;
364 struct udevice *dev = mmc->dev;
365 unsigned long timeout;
369 clk = sdhci_readw(host, SDHCI_CLOCK_CONTROL);
370 clk &= ~(SDHCI_CLOCK_CARD_EN);
371 sdhci_writew(host, clk, SDHCI_CLOCK_CONTROL);
373 /* Issue DLL Reset */
374 ret = zynqmp_dll_reset(node_id, PM_DLL_RESET_ASSERT);
376 dev_err(dev, "dll_reset assert failed with err: %d\n", ret);
380 /* Allow atleast 1ms delay for proper DLL reset */
382 ret = zynqmp_dll_reset(node_id, PM_DLL_RESET_RELEASE);
384 dev_err(dev, "dll_reset release failed with err: %d\n", ret);
390 while (!((clk = sdhci_readw(host, SDHCI_CLOCK_CONTROL))
391 & SDHCI_CLOCK_INT_STABLE)) {
393 dev_err(dev, ": Internal clock never stabilised.\n");
400 clk |= SDHCI_CLOCK_CARD_EN;
401 sdhci_writew(host, clk, SDHCI_CLOCK_CONTROL);
406 static int arasan_sdhci_execute_tuning(struct mmc *mmc, u8 opcode)
409 struct mmc_data data;
411 struct sdhci_host *host;
412 struct arasan_sdhci_priv *priv = dev_get_priv(mmc->dev);
413 int tuning_loop_counter = SDHCI_TUNING_LOOP_COUNT;
415 dev_dbg(mmc->dev, "%s\n", __func__);
419 ctrl = sdhci_readw(host, SDHCI_HOST_CONTROL2);
420 ctrl |= SDHCI_CTRL_EXEC_TUNING;
421 sdhci_writew(host, ctrl, SDHCI_HOST_CONTROL2);
425 if (device_is_compatible(mmc->dev, "xlnx,zynqmp-8.9a"))
426 arasan_zynqmp_dll_reset(host, priv->node_id);
428 sdhci_writel(host, SDHCI_INT_DATA_AVAIL, SDHCI_INT_ENABLE);
429 sdhci_writel(host, SDHCI_INT_DATA_AVAIL, SDHCI_SIGNAL_ENABLE);
433 cmd.resp_type = MMC_RSP_R1;
438 data.flags = MMC_DATA_READ;
440 if (tuning_loop_counter-- == 0)
443 if (cmd.cmdidx == MMC_CMD_SEND_TUNING_BLOCK_HS200 &&
445 data.blocksize = 128;
447 sdhci_writew(host, SDHCI_MAKE_BLKSZ(SDHCI_DEFAULT_BOUNDARY_ARG,
450 sdhci_writew(host, data.blocks, SDHCI_BLOCK_COUNT);
451 sdhci_writew(host, SDHCI_TRNS_READ, SDHCI_TRANSFER_MODE);
453 mmc_send_cmd(mmc, &cmd, NULL);
454 ctrl = sdhci_readw(host, SDHCI_HOST_CONTROL2);
456 if (cmd.cmdidx == MMC_CMD_SEND_TUNING_BLOCK)
459 } while (ctrl & SDHCI_CTRL_EXEC_TUNING);
461 if (tuning_loop_counter < 0) {
462 ctrl &= ~SDHCI_CTRL_TUNED_CLK;
463 sdhci_writel(host, ctrl, SDHCI_HOST_CONTROL2);
466 if (!(ctrl & SDHCI_CTRL_TUNED_CLK)) {
467 printf("%s:Tuning failed\n", __func__);
473 if (device_is_compatible(mmc->dev, "xlnx,zynqmp-8.9a"))
474 arasan_zynqmp_dll_reset(host, priv->node_id);
476 /* Enable only interrupts served by the SD controller */
477 sdhci_writel(host, SDHCI_INT_DATA_MASK | SDHCI_INT_CMD_MASK,
479 /* Mask all sdhci interrupt sources */
480 sdhci_writel(host, 0x0, SDHCI_SIGNAL_ENABLE);
486 * sdhci_zynqmp_sdcardclk_set_phase - Set the SD Output Clock Tap Delays
488 * @host: Pointer to the sdhci_host structure.
489 * @degrees: The clock phase shift between 0 - 359.
492 * Set the SD Output Clock Tap Delays for Output path
494 static int sdhci_zynqmp_sdcardclk_set_phase(struct sdhci_host *host,
497 struct mmc *mmc = (struct mmc *)host->mmc;
498 struct udevice *dev = mmc->dev;
499 struct arasan_sdhci_priv *priv = dev_get_priv(mmc->dev);
500 u8 tap_delay, tap_max = 0;
501 int timing = mode2timing[mmc->selected_mode];
505 * This is applicable for SDHCI_SPEC_300 and above
506 * ZynqMP does not set phase for <=25MHz clock.
507 * If degrees is zero, no need to do anything.
509 if (SDHCI_GET_VERSION(host) < SDHCI_SPEC_300)
513 case MMC_TIMING_MMC_HS:
514 case MMC_TIMING_SD_HS:
515 case MMC_TIMING_UHS_SDR25:
516 case MMC_TIMING_UHS_DDR50:
517 case MMC_TIMING_MMC_DDR52:
518 /* For 50MHz clock, 30 Taps are available */
521 case MMC_TIMING_UHS_SDR50:
522 /* For 100MHz clock, 15 Taps are available */
525 case MMC_TIMING_UHS_SDR104:
526 case MMC_TIMING_MMC_HS200:
527 /* For 200MHz clock, 8 Taps are available */
533 tap_delay = (degrees * tap_max) / 360;
535 /* Limit output tap_delay value to 6 bits */
536 tap_delay &= SDHCI_ARASAN_OTAPDLY_SEL_MASK;
538 /* Set the Clock Phase */
539 ret = arasan_zynqmp_set_out_tapdelay(priv->node_id, tap_delay);
541 dev_err(dev, "Error setting output Tap Delay\n");
545 /* Release DLL Reset */
546 ret = zynqmp_dll_reset(priv->node_id, PM_DLL_RESET_RELEASE);
548 dev_err(dev, "dll_reset release failed with err: %d\n", ret);
556 * sdhci_zynqmp_sampleclk_set_phase - Set the SD Input Clock Tap Delays
558 * @host: Pointer to the sdhci_host structure.
559 * @degrees: The clock phase shift between 0 - 359.
562 * Set the SD Input Clock Tap Delays for Input path
564 static int sdhci_zynqmp_sampleclk_set_phase(struct sdhci_host *host,
567 struct mmc *mmc = (struct mmc *)host->mmc;
568 struct udevice *dev = mmc->dev;
569 struct arasan_sdhci_priv *priv = dev_get_priv(mmc->dev);
570 u8 tap_delay, tap_max = 0;
571 int timing = mode2timing[mmc->selected_mode];
575 * This is applicable for SDHCI_SPEC_300 and above
576 * ZynqMP does not set phase for <=25MHz clock.
577 * If degrees is zero, no need to do anything.
579 if (SDHCI_GET_VERSION(host) < SDHCI_SPEC_300)
582 /* Assert DLL Reset */
583 ret = zynqmp_dll_reset(priv->node_id, PM_DLL_RESET_ASSERT);
585 dev_err(dev, "dll_reset assert failed with err: %d\n", ret);
590 case MMC_TIMING_MMC_HS:
591 case MMC_TIMING_SD_HS:
592 case MMC_TIMING_UHS_SDR25:
593 case MMC_TIMING_UHS_DDR50:
594 case MMC_TIMING_MMC_DDR52:
595 /* For 50MHz clock, 120 Taps are available */
598 case MMC_TIMING_UHS_SDR50:
599 /* For 100MHz clock, 60 Taps are available */
602 case MMC_TIMING_UHS_SDR104:
603 case MMC_TIMING_MMC_HS200:
604 /* For 200MHz clock, 30 Taps are available */
610 tap_delay = (degrees * tap_max) / 360;
612 /* Limit input tap_delay value to 8 bits */
613 tap_delay &= SDHCI_ARASAN_ITAPDLY_SEL_MASK;
615 ret = arasan_zynqmp_set_in_tapdelay(priv->node_id, tap_delay);
617 dev_err(dev, "Error setting Input Tap Delay\n");
625 * sdhci_versal_sdcardclk_set_phase - Set the SD Output Clock Tap Delays
627 * @host: Pointer to the sdhci_host structure.
628 * @degrees: The clock phase shift between 0 - 359.
631 * Set the SD Output Clock Tap Delays for Output path
633 static int sdhci_versal_sdcardclk_set_phase(struct sdhci_host *host,
636 struct mmc *mmc = (struct mmc *)host->mmc;
637 u8 tap_delay, tap_max = 0;
638 int timing = mode2timing[mmc->selected_mode];
642 * This is applicable for SDHCI_SPEC_300 and above
643 * Versal does not set phase for <=25MHz clock.
644 * If degrees is zero, no need to do anything.
646 if (SDHCI_GET_VERSION(host) < SDHCI_SPEC_300)
650 case MMC_TIMING_MMC_HS:
651 case MMC_TIMING_SD_HS:
652 case MMC_TIMING_UHS_SDR25:
653 case MMC_TIMING_UHS_DDR50:
654 case MMC_TIMING_MMC_DDR52:
655 /* For 50MHz clock, 30 Taps are available */
658 case MMC_TIMING_UHS_SDR50:
659 /* For 100MHz clock, 15 Taps are available */
662 case MMC_TIMING_UHS_SDR104:
663 case MMC_TIMING_MMC_HS200:
664 /* For 200MHz clock, 8 Taps are available */
670 tap_delay = (degrees * tap_max) / 360;
672 /* Limit output tap_delay value to 6 bits */
673 tap_delay &= SDHCI_ARASAN_OTAPDLY_SEL_MASK;
675 /* Set the Clock Phase */
676 regval = sdhci_readl(host, SDHCI_ARASAN_OTAPDLY_REGISTER);
677 regval |= SDHCI_OTAPDLY_ENABLE;
678 sdhci_writel(host, regval, SDHCI_ARASAN_OTAPDLY_REGISTER);
679 regval &= ~SDHCI_ARASAN_OTAPDLY_SEL_MASK;
681 sdhci_writel(host, regval, SDHCI_ARASAN_OTAPDLY_REGISTER);
687 * sdhci_versal_sampleclk_set_phase - Set the SD Input Clock Tap Delays
689 * @host: Pointer to the sdhci_host structure.
690 * @degrees: The clock phase shift between 0 - 359.
693 * Set the SD Input Clock Tap Delays for Input path
695 static int sdhci_versal_sampleclk_set_phase(struct sdhci_host *host,
698 struct mmc *mmc = (struct mmc *)host->mmc;
699 u8 tap_delay, tap_max = 0;
700 int timing = mode2timing[mmc->selected_mode];
704 * This is applicable for SDHCI_SPEC_300 and above
705 * Versal does not set phase for <=25MHz clock.
706 * If degrees is zero, no need to do anything.
708 if (SDHCI_GET_VERSION(host) < SDHCI_SPEC_300)
712 case MMC_TIMING_MMC_HS:
713 case MMC_TIMING_SD_HS:
714 case MMC_TIMING_UHS_SDR25:
715 case MMC_TIMING_UHS_DDR50:
716 case MMC_TIMING_MMC_DDR52:
717 /* For 50MHz clock, 120 Taps are available */
720 case MMC_TIMING_UHS_SDR50:
721 /* For 100MHz clock, 60 Taps are available */
724 case MMC_TIMING_UHS_SDR104:
725 case MMC_TIMING_MMC_HS200:
726 /* For 200MHz clock, 30 Taps are available */
732 tap_delay = (degrees * tap_max) / 360;
734 /* Limit input tap_delay value to 8 bits */
735 tap_delay &= SDHCI_ARASAN_ITAPDLY_SEL_MASK;
737 /* Set the Clock Phase */
738 regval = sdhci_readl(host, SDHCI_ARASAN_ITAPDLY_REGISTER);
739 regval |= SDHCI_ITAPDLY_CHGWIN;
740 sdhci_writel(host, regval, SDHCI_ARASAN_ITAPDLY_REGISTER);
741 regval |= SDHCI_ITAPDLY_ENABLE;
742 sdhci_writel(host, regval, SDHCI_ARASAN_ITAPDLY_REGISTER);
743 regval &= ~SDHCI_ARASAN_ITAPDLY_SEL_MASK;
745 sdhci_writel(host, regval, SDHCI_ARASAN_ITAPDLY_REGISTER);
746 regval &= ~SDHCI_ITAPDLY_CHGWIN;
747 sdhci_writel(host, regval, SDHCI_ARASAN_ITAPDLY_REGISTER);
753 * sdhci_versal_net_emmc_sdcardclk_set_phase - Set eMMC Output Clock Tap Delays
755 * @host: Pointer to the sdhci_host structure.
756 * @degrees: The clock phase shift between 0 - 359.
759 * Set eMMC Output Clock Tap Delays for Output path
761 static int sdhci_versal_net_emmc_sdcardclk_set_phase(struct sdhci_host *host, int degrees)
763 struct mmc *mmc = (struct mmc *)host->mmc;
764 int timing = mode2timing[mmc->selected_mode];
765 u8 tap_delay, tap_max = 0;
769 case MMC_TIMING_MMC_HS:
770 case MMC_TIMING_MMC_DDR52:
773 case MMC_TIMING_MMC_HS200:
774 case MMC_TIMING_MMC_HS400:
775 /* For 200MHz clock, 32 Taps are available */
782 tap_delay = (degrees * tap_max) / 360;
783 /* Set the Clock Phase */
785 regval = sdhci_readl(host, PHY_CTRL_REG1);
786 regval |= PHY_CTRL_OTAPDLY_ENA_MASK;
787 sdhci_writel(host, regval, PHY_CTRL_REG1);
788 regval &= ~PHY_CTRL_OTAPDLY_SEL_MASK;
789 regval |= tap_delay << PHY_CTRL_OTAPDLY_SEL_SHIFT;
790 sdhci_writel(host, regval, PHY_CTRL_REG1);
797 * sdhci_versal_net_emmc_sampleclk_set_phase - Set eMMC Input Clock Tap Delays
799 * @host: Pointer to the sdhci_host structure.
800 * @degrees: The clock phase shift between 0 - 359.
803 * Set eMMC Input Clock Tap Delays for Input path. If HS400 is selected,
804 * set strobe90 and strobe180 in PHY_CTRL_REG1.
806 static int sdhci_versal_net_emmc_sampleclk_set_phase(struct sdhci_host *host, int degrees)
808 struct mmc *mmc = (struct mmc *)host->mmc;
809 int timing = mode2timing[mmc->selected_mode];
810 u8 tap_delay, tap_max = 0;
814 case MMC_TIMING_MMC_HS:
815 case MMC_TIMING_MMC_DDR52:
818 case MMC_TIMING_MMC_HS400:
819 /* Strobe select tap point for strb90 and strb180 */
820 regval = sdhci_readl(host, PHY_CTRL_REG1);
821 regval &= ~PHY_CTRL_STRB_SEL_MASK;
822 regval |= VERSAL_NET_PHY_CTRL_STRB90_STRB180_VAL << PHY_CTRL_STRB_SEL_SHIFT;
823 sdhci_writel(host, regval, PHY_CTRL_REG1);
829 tap_delay = (degrees * tap_max) / 360;
830 /* Set the Clock Phase */
832 regval = sdhci_readl(host, PHY_CTRL_REG1);
833 regval |= PHY_CTRL_ITAP_CHG_WIN_MASK;
834 sdhci_writel(host, regval, PHY_CTRL_REG1);
835 regval |= PHY_CTRL_ITAPDLY_ENA_MASK;
836 sdhci_writel(host, regval, PHY_CTRL_REG1);
837 regval &= ~PHY_CTRL_ITAPDLY_SEL_MASK;
838 regval |= tap_delay << PHY_CTRL_ITAPDLY_SEL_SHIFT;
839 sdhci_writel(host, regval, PHY_CTRL_REG1);
840 regval &= ~PHY_CTRL_ITAP_CHG_WIN_MASK;
841 sdhci_writel(host, regval, PHY_CTRL_REG1);
847 static int arasan_sdhci_set_tapdelay(struct sdhci_host *host)
849 struct arasan_sdhci_priv *priv = dev_get_priv(host->mmc->dev);
850 struct arasan_sdhci_clk_data *clk_data = &priv->clk_data;
851 struct mmc *mmc = (struct mmc *)host->mmc;
852 struct udevice *dev = mmc->dev;
853 u8 timing = mode2timing[mmc->selected_mode];
854 u32 iclk_phase = clk_data->clk_phase_in[timing];
855 u32 oclk_phase = clk_data->clk_phase_out[timing];
858 dev_dbg(dev, "%s, host:%s, mode:%d\n", __func__, host->name, timing);
860 if (IS_ENABLED(CONFIG_ARCH_ZYNQMP) &&
861 device_is_compatible(dev, "xlnx,zynqmp-8.9a")) {
862 ret = sdhci_zynqmp_sampleclk_set_phase(host, iclk_phase);
866 ret = sdhci_zynqmp_sdcardclk_set_phase(host, oclk_phase);
869 } else if (IS_ENABLED(CONFIG_ARCH_VERSAL) &&
870 device_is_compatible(dev, "xlnx,versal-8.9a")) {
871 ret = sdhci_versal_sampleclk_set_phase(host, iclk_phase);
875 ret = sdhci_versal_sdcardclk_set_phase(host, oclk_phase);
878 } else if (IS_ENABLED(CONFIG_ARCH_VERSAL_NET) &&
879 device_is_compatible(dev, "xlnx,versal-net-emmc")) {
880 if (mmc->clock >= MIN_PHY_CLK_HZ)
881 if (iclk_phase == VERSAL_NET_EMMC_ICLK_PHASE_DDR52_DLY_CHAIN)
882 iclk_phase = VERSAL_NET_EMMC_ICLK_PHASE_DDR52_DLL;
884 ret = sdhci_versal_net_emmc_sampleclk_set_phase(host, iclk_phase);
888 ret = sdhci_versal_net_emmc_sdcardclk_set_phase(host, oclk_phase);
896 static void arasan_dt_read_clk_phase(struct udevice *dev, unsigned char timing,
899 struct arasan_sdhci_priv *priv = dev_get_priv(dev);
900 struct arasan_sdhci_clk_data *clk_data = &priv->clk_data;
901 u32 clk_phase[2] = {0};
904 * Read Tap Delay values from DT, if the DT does not contain the
905 * Tap Values then use the pre-defined values
907 if (dev_read_u32_array(dev, prop, &clk_phase[0], 2)) {
908 dev_dbg(dev, "Using predefined clock phase for %s = %d %d\n",
909 prop, clk_data->clk_phase_in[timing],
910 clk_data->clk_phase_out[timing]);
914 /* The values read are Input and Output Clock Delays in order */
915 clk_data->clk_phase_in[timing] = clk_phase[0];
916 clk_data->clk_phase_out[timing] = clk_phase[1];
920 * arasan_dt_parse_clk_phases - Read Tap Delay values from DT
922 * @dev: Pointer to our struct udevice.
924 * Called at initialization to parse the values of Tap Delays.
926 static void arasan_dt_parse_clk_phases(struct udevice *dev)
928 struct arasan_sdhci_priv *priv = dev_get_priv(dev);
929 struct arasan_sdhci_clk_data *clk_data = &priv->clk_data;
932 if (IS_ENABLED(CONFIG_ARCH_ZYNQMP) &&
933 device_is_compatible(dev, "xlnx,zynqmp-8.9a")) {
934 for (i = 0; i <= MMC_TIMING_MMC_HS400; i++) {
935 clk_data->clk_phase_in[i] = zynqmp_iclk_phases[i];
936 clk_data->clk_phase_out[i] = zynqmp_oclk_phases[i];
939 if (priv->bank == MMC_BANK2) {
940 clk_data->clk_phase_out[MMC_TIMING_UHS_SDR104] = 90;
941 clk_data->clk_phase_out[MMC_TIMING_MMC_HS200] = 90;
945 if (IS_ENABLED(CONFIG_ARCH_VERSAL) &&
946 device_is_compatible(dev, "xlnx,versal-8.9a")) {
947 for (i = 0; i <= MMC_TIMING_MMC_HS400; i++) {
948 clk_data->clk_phase_in[i] = versal_iclk_phases[i];
949 clk_data->clk_phase_out[i] = versal_oclk_phases[i];
953 if (IS_ENABLED(CONFIG_ARCH_VERSAL_NET) &&
954 device_is_compatible(dev, "xlnx,versal-net-emmc")) {
955 for (i = 0; i <= MMC_TIMING_MMC_HS400; i++) {
956 clk_data->clk_phase_in[i] = versal_net_emmc_iclk_phases[i];
957 clk_data->clk_phase_out[i] = versal_net_emmc_oclk_phases[i];
961 arasan_dt_read_clk_phase(dev, MMC_TIMING_LEGACY,
963 arasan_dt_read_clk_phase(dev, MMC_TIMING_MMC_HS,
965 arasan_dt_read_clk_phase(dev, MMC_TIMING_SD_HS,
967 arasan_dt_read_clk_phase(dev, MMC_TIMING_UHS_SDR12,
968 "clk-phase-uhs-sdr12");
969 arasan_dt_read_clk_phase(dev, MMC_TIMING_UHS_SDR25,
970 "clk-phase-uhs-sdr25");
971 arasan_dt_read_clk_phase(dev, MMC_TIMING_UHS_SDR50,
972 "clk-phase-uhs-sdr50");
973 arasan_dt_read_clk_phase(dev, MMC_TIMING_UHS_SDR104,
974 "clk-phase-uhs-sdr104");
975 arasan_dt_read_clk_phase(dev, MMC_TIMING_UHS_DDR50,
976 "clk-phase-uhs-ddr50");
977 arasan_dt_read_clk_phase(dev, MMC_TIMING_MMC_DDR52,
978 "clk-phase-mmc-ddr52");
979 arasan_dt_read_clk_phase(dev, MMC_TIMING_MMC_HS200,
980 "clk-phase-mmc-hs200");
981 arasan_dt_read_clk_phase(dev, MMC_TIMING_MMC_HS400,
982 "clk-phase-mmc-hs400");
985 static const struct sdhci_ops arasan_ops = {
986 .platform_execute_tuning = &arasan_sdhci_execute_tuning,
987 .set_delay = &arasan_sdhci_set_tapdelay,
988 .set_control_reg = &sdhci_set_control_reg,
989 #if defined(CONFIG_ARCH_VERSAL_NET)
990 .config_dll = &arasan_sdhci_config_dll,
#if defined(CONFIG_ARCH_ZYNQMP) && defined(CONFIG_ZYNQMP_FIRMWARE)
/*
 * Configure the SD controller dynamically through PMU firmware: request the
 * node, hold the controller in reset while programming fixed config, eMMC
 * select, base clock (rounded up to 25/50/100/200MHz) and bus width, then
 * release the reset.
 */
static int sdhci_zynqmp_set_dynamic_config(struct arasan_sdhci_priv *priv,
					   struct udevice *dev)
{
	int ret;
	struct clk clk;
	unsigned long clock, mhz;

	ret = xilinx_pm_request(PM_REQUEST_NODE, priv->node_id,
				ZYNQMP_PM_CAPABILITY_ACCESS, ZYNQMP_PM_MAX_QOS,
				ZYNQMP_PM_REQUEST_ACK_NO, NULL);
	if (ret) {
		dev_err(dev, "Request node failed for %d\n", priv->node_id);
		return ret;
	}

	ret = reset_get_bulk(dev, &priv->resets);
	if (ret == -ENOTSUPP || ret == -ENOENT) {
		dev_err(dev, "Reset not found\n");
		return 0;
	} else if (ret) {
		dev_err(dev, "Reset failed\n");
		return ret;
	}

	ret = reset_assert_bulk(&priv->resets);
	if (ret) {
		dev_err(dev, "Reset assert failed\n");
		return ret;
	}

	ret = zynqmp_pm_set_sd_config(priv->node_id, SD_CONFIG_FIXED, 0);
	if (ret) {
		dev_err(dev, "SD_CONFIG_FIXED failed\n");
		return ret;
	}

	ret = zynqmp_pm_set_sd_config(priv->node_id, SD_CONFIG_EMMC_SEL,
				      dev_read_bool(dev, "non-removable"));
	if (ret) {
		dev_err(dev, "SD_CONFIG_EMMC_SEL failed\n");
		return ret;
	}

	ret = clk_get_by_index(dev, 0, &clk);
	if (ret < 0) {
		dev_err(dev, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(dev, "failed to get rate\n");
		return clock;
	}

	/* Round the base clock up to the nearest supported bucket */
	mhz = DIV64_U64_ROUND_UP(clock, 1000000);
	if (mhz > 100 && mhz <= 200)
		mhz = 200;
	else if (mhz > 50 && mhz <= 100)
		mhz = 100;
	else if (mhz > 25 && mhz <= 50)
		mhz = 50;
	else
		mhz = 25;

	ret = zynqmp_pm_set_sd_config(priv->node_id, SD_CONFIG_BASECLK, mhz);
	if (ret) {
		dev_err(dev, "SD_CONFIG_BASECLK failed\n");
		return ret;
	}

	ret = zynqmp_pm_set_sd_config(priv->node_id, SD_CONFIG_8BIT,
				      (dev_read_u32_default(dev, "bus-width", 1) == 8));
	if (ret) {
		dev_err(dev, "SD_CONFIG_8BIT failed\n");
		return ret;
	}

	ret = reset_deassert_bulk(&priv->resets);
	if (ret) {
		dev_err(dev, "Reset release failed\n");
		return ret;
	}

	return 0;
}
#endif /* CONFIG_ARCH_ZYNQMP && CONFIG_ZYNQMP_FIRMWARE (restored) */
1085 static int arasan_sdhci_probe(struct udevice *dev)
1087 struct arasan_sdhci_plat *plat = dev_get_plat(dev);
1088 struct mmc_uclass_priv *upriv = dev_get_uclass_priv(dev);
1089 struct arasan_sdhci_priv *priv = dev_get_priv(dev);
1090 struct sdhci_host *host;
1092 unsigned long clock;
1097 #if defined(CONFIG_ARCH_ZYNQMP) && defined(CONFIG_ZYNQMP_FIRMWARE)
1098 if (device_is_compatible(dev, "xlnx,zynqmp-8.9a")) {
1099 ret = zynqmp_pm_is_function_supported(PM_IOCTL,
1100 IOCTL_SET_SD_CONFIG);
1102 ret = sdhci_zynqmp_set_dynamic_config(priv, dev);
1108 if (device_is_compatible(dev, "xlnx,versal-net-emmc"))
1109 priv->internal_phy_reg = true;
1111 ret = clk_get_by_index(dev, 0, &clk);
1113 dev_err(dev, "failed to get clock\n");
1117 clock = clk_get_rate(&clk);
1118 if (IS_ERR_VALUE(clock)) {
1119 dev_err(dev, "failed to get rate\n");
1123 dev_dbg(dev, "%s: CLK %ld\n", __func__, clock);
1125 ret = clk_enable(&clk);
1127 dev_err(dev, "failed to enable clock\n");
1131 host->quirks = SDHCI_QUIRK_WAIT_SEND_CMD |
1132 SDHCI_QUIRK_BROKEN_R1B;
1134 #ifdef CONFIG_ZYNQ_HISPD_BROKEN
1135 host->quirks |= SDHCI_QUIRK_BROKEN_HISPD_MODE;
1139 host->quirks |= SDHCI_QUIRK_NO_1_8_V;
1141 if (CONFIG_IS_ENABLED(ARCH_VERSAL_NET) &&
1142 device_is_compatible(dev, "xlnx,versal-net-emmc"))
1143 host->quirks |= SDHCI_QUIRK_CAPS_BIT63_FOR_HS400;
1145 plat->cfg.f_max = CONFIG_ZYNQ_SDHCI_MAX_FREQ;
1147 ret = mmc_of_parse(dev, &plat->cfg);
1151 host->max_clk = clock;
1153 host->mmc = &plat->mmc;
1154 host->mmc->dev = dev;
1155 host->mmc->priv = host;
1157 ret = sdhci_setup_cfg(&plat->cfg, host, plat->cfg.f_max,
1158 CONFIG_ZYNQ_SDHCI_MIN_FREQ);
1161 upriv->mmc = host->mmc;
1164 * WORKAROUND: Versal platforms have an issue with card detect state.
1165 * Due to this, host controller is switching off voltage to sd card
1166 * causing sd card timeout error. Workaround this by adding a wait for
1167 * 1000msec till the card detect state gets stable.
1169 if (IS_ENABLED(CONFIG_ARCH_ZYNQMP) || IS_ENABLED(CONFIG_ARCH_VERSAL)) {
1170 u32 timeout = 1000000;
1172 while (((sdhci_readl(host, SDHCI_PRESENT_STATE) &
1173 SDHCI_CARD_STATE_STABLE) == 0) && timeout) {
1178 dev_err(dev, "Sdhci card detect state not stable\n");
1183 return sdhci_probe(dev);
1186 static int arasan_sdhci_of_to_plat(struct udevice *dev)
1188 struct arasan_sdhci_priv *priv = dev_get_priv(dev);
1191 priv->host = calloc(1, sizeof(struct sdhci_host));
1195 priv->host->name = dev->name;
1197 #if defined(CONFIG_ARCH_ZYNQMP) || defined(CONFIG_ARCH_VERSAL) || defined(CONFIG_ARCH_VERSAL_NET)
1198 priv->host->ops = &arasan_ops;
1199 arasan_dt_parse_clk_phases(dev);
1202 priv->host->ioaddr = dev_read_addr_ptr(dev);
1203 if (!priv->host->ioaddr)
1206 priv->bank = dev_read_u32_default(dev, "xlnx,mio-bank", 0);
1207 priv->no_1p8 = dev_read_bool(dev, "no-1-8-v");
1210 if (!dev_read_u32_array(dev, "power-domains", pm_info, ARRAY_SIZE(pm_info)))
1211 priv->node_id = pm_info[1];
1216 static int arasan_sdhci_bind(struct udevice *dev)
1218 struct arasan_sdhci_plat *plat = dev_get_plat(dev);
1220 return sdhci_bind(dev, &plat->mmc, &plat->cfg);
1223 static const struct udevice_id arasan_sdhci_ids[] = {
1224 { .compatible = "arasan,sdhci-8.9a" },
1225 { .compatible = "xlnx,versal-net-emmc" },
1229 U_BOOT_DRIVER(arasan_sdhci_drv) = {
1230 .name = "arasan_sdhci",
1232 .of_match = arasan_sdhci_ids,
1233 .of_to_plat = arasan_sdhci_of_to_plat,
1235 .bind = arasan_sdhci_bind,
1236 .probe = arasan_sdhci_probe,
1237 .priv_auto = sizeof(struct arasan_sdhci_priv),
1238 .plat_auto = sizeof(struct arasan_sdhci_plat),