/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3288.h>
#include <asm/arch/ddr_rk3288.h>
#include <asm/arch/grf_rk3288.h>
#include <asm/arch/pmu_rk3288.h>
#include <asm/arch/sdram.h>
#include <asm/arch/sdram_common.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk8xx_pmic.h>

DECLARE_GLOBAL_DATA_PTR;
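
/*
 * Per-channel register blocks (protocol controller, PHY and memory
 * scheduler) and the driver-wide state used by the RK3288 DMC driver.
 */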
struct chan_info {
	struct rk3288_ddr_pctl *pctl;
	struct rk3288_ddr_publ *publ;
	struct rk3288_msch *msch;
};

struct dram_info {
	struct chan_info chan[2];
	struct ram_info info;
	struct clk ddr_clk;
	struct rk3288_cru *cru;
	struct rk3288_grf *grf;
	struct rk3288_sgrf *sgrf;
	struct rk3288_pmu *pmu;
	bool is_veyron;
};
struct rk3288_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3288_dmc of_plat;
#endif
	struct rk3288_sdram_channel ch[2];
	struct rk3288_sdram_pctl_timing pctl_timing;
	struct rk3288_sdram_phy_timing phy_timing;
	struct rk3288_base_params base;
	int num_channels;
	struct regmap *map;
};
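
/*
 * Candidate 'ddrconf' encodings: each entry packs a row code at
 * DDRCONF_ROW_SHIFT and a column/bus-width code at DDRCONF_COL_SHIFT.
 * sdram_get_niu_config() builds the same encoding from the detected
 * geometry and looks it up here; the matching index is later written to
 * the memory scheduler's ddrconf register by dram_cfg_rbc().
 */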
const int ddrconf_table[] = {
	((1 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((4 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),

#define TEST_PATTEN 0x5aa5f00f
#define DQS_GATE_TRAINING_ERROR_RANK0 (1 << 4)
#define DQS_GATE_TRAINING_ERROR_RANK1 (2 << 4)
#ifdef CONFIG_SPL_BUILD
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
	for (i = 0; i < n / sizeof(u32); i++) {
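
/*
 * Assert/de-assert the upctl and PHY soft resets of one channel via
 * CRU_SOFTRST_CON10. 'ctl' and 'phy' select whether the respective reset
 * lines are asserted (1) or released (0).
 */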
static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;
	u32 ctl_psrstn_shift = 3 + 5 * ch;
	u32 ctl_srstn_shift = 2 + 5 * ch;
	u32 phy_psrstn_shift = 1 + 5 * ch;
	u32 phy_srstn_shift = 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
		     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
		     1 << phy_srstn_shift,
		     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
		     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
		     phy << phy_srstn_shift);
}

static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
}
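
/*
 * Fully reset the PHY and protocol controller of a channel: assert all
 * resets, pulse the AC and DATX8 DLL soft resets, then release the PHY
 * reset and finally the controller reset.
 */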
static void phy_pctrl_reset(struct rk3288_cru *cru,
			    struct rk3288_ddr_publ *publ,
	ddr_reset(cru, channel, 1, 1);

	clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	ddr_reset(cru, channel, 1, 0);
	ddr_reset(cru, channel, 0, 0);
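
/*
 * Configure the PHY DLLs according to the DDR clock rate: at or below
 * 250MHz the AC and DATX8 DLLs are disabled and DLL bypass is selected
 * in PIR (SBIAS_BYPASS is additionally set in the 150-250MHz range);
 * above 250MHz the DLLs run normally and bypass is cleared.
 */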
static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
	if (freq <= 250000000) {
		if (freq <= 150000000)
			clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
			setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxdllcr,
		setbits_le32(&publ->pir, PIR_DLLBYP);
		clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++) {
			clrbits_le32(&publ->datx8[i].dxdllcr,
		clrbits_le32(&publ->pir, PIR_DLLBYP);
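
/*
 * Program the DFI (DDR PHY interface) configuration and timing registers
 * of the protocol controller: start DFI init, set the low-power and
 * parity options, the PHY read/write latencies and the clock
 * enable/disable timings, enable write ODT for both ranks and disable
 * phyupd/ctrlupd requests.
 */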
static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
	writel(DFI_INIT_START, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,

	writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
	writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
	writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
	writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
	writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
	writel(1, &pctl->dfitphyupdtype0);

	/* Enable write ODT for cs0 and cs1 */
	writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
	/* ODT write length */
	writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
	/* Disable phyupd and ctrlupd requests */
	writel(0, &pctl->dfiupdcfg);
static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
		val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
			    DDR0_16BIT_EN_SHIFT);
	rk_clrsetreg(&grf->soc_con0,
		     1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),

static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
	mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
	val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
			    MSCH0_MAINDDR3_SHIFT);
	rk_clrsetreg(&grf->soc_con0, mask, val);

static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
			       bool enable, bool enable_bst, bool enable_odt)
	bool disable_bst = !enable_bst;

		(1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
		 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
		(1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
		 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
	rk_clrsetreg(&grf->soc_con2, mask,
		     enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
		     disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
				     UPCTL0_BST_DIABLE_SHIFT) |
		     enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
				    UPCTL0_LPDDR3_ODT_EN_SHIFT));
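
/*
 * Configure the protocol controller for one channel: copy the pre-computed
 * pctl timing set into its timing registers, then program MCFG, the DFI
 * read/write latencies and the GRF mode bits according to the DRAM type
 * (LPDDR3 or DDR3).
 */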
static void pctl_cfg(int channel, struct rk3288_ddr_pctl *pctl,
		     struct rk3288_sdram_params *sdram_params,
		     struct rk3288_grf *grf)
	unsigned int burstlen;

	burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(sdram_params->pctl_timing));
	switch (sdram_params->base.dramtype) {
		writel(sdram_params->pctl_timing.tcl - 1,
		       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl,
		       &pctl->dfitphywrlat);
		writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
		       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
		       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		ddr_set_ddr3_mode(grf, channel, false);
		ddr_set_enable(grf, channel, true);
		ddr_set_en_bst_odt(grf, channel, true, false,
				   sdram_params->base.odt);
		if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
			writel(sdram_params->pctl_timing.tcl - 3,
			       &pctl->dfitrddataen);
			writel(sdram_params->pctl_timing.tcl - 2,
			       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl - 1,
		       &pctl->dfitphywrlat);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		ddr_set_ddr3_mode(grf, channel, true);
		ddr_set_enable(grf, channel, true);

		ddr_set_en_bst_odt(grf, channel, false, true, false);

	setbits_le32(&pctl->scfg, 1);
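
/*
 * Configure the PUBL PHY and the memory scheduler for one channel: copy
 * the DTPR timing set, program the NoC timing/activate/devtodev registers,
 * set the PHY init timers (PTR registers) from the DDR frequency, and
 * apply DRAM-type specific PGCR/DCR/DXCCR/DSGCR settings plus dynamic RTT
 * on/off according to the ODT parameter.
 */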
static void phy_cfg(const struct chan_info *chan, int channel,
		    struct rk3288_sdram_params *sdram_params)
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;
	uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;

	dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);

	copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
		    sizeof(sdram_params->phy_timing));
	writel(sdram_params->base.noc_timing, &msch->ddrtiming);
	writel(0x3f, &msch->readlatency);
	writel(sdram_params->base.noc_activate, &msch->activate);
	writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
	       1 << BUSRDTORD_SHIFT, &msch->devtodev);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
	       8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
	writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,

	switch (sdram_params->base.dramtype) {
		clrsetbits_le32(&publ->pgcr, 0x1F,
				0 << PGCR_DFTLMT_SHIFT |
				0 << PGCR_DFTCMP_SHIFT |
				1 << PGCR_DQSCFG_SHIFT |
				0 << PGCR_ITMDMD_SHIFT);
		/* DDRMODE select LPDDR3 */
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
		clrsetbits_le32(&publ->dxccr,
				DQSNRES_MASK << DQSNRES_SHIFT |
				DQSRES_MASK << DQSRES_SHIFT,
				4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
		tmp = readl(&publ->dtpr[1]);
		tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
		      ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
		clrsetbits_le32(&publ->dsgcr,
				DQSGE_MASK << DQSGE_SHIFT |
				DQSGX_MASK << DQSGX_SHIFT,
				tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
		clrbits_le32(&publ->pgcr, 0x1f);
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_DDR3 << DDRMD_SHIFT);

	if (sdram_params->base.odt) {
		/* Dynamic RTT enable */
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
		/* Dynamic RTT disable */
		for (i = 0; i < 4; i++)
			clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
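
/*
 * Kick off PHY initialisation (DLL soft reset/lock, ZQ calibration) via
 * the PIR register and wait for the done flags in PGSR.
 */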
static void phy_init(struct rk3288_ddr_publ *publ)
	setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
		     | PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
	while ((readl(&publ->pgsr) &
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
	       (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
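
/*
 * Issue a command to the DRAM through the controller's MCMD register and
 * wait for the START_CMD bit to clear, indicating completion.
 */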
static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	while (readl(&pctl->mcmd) & START_CMD)

static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
				   u32 rank, u32 cmd, u32 ma, u32 op)
{
	send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
		     (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
}
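
/*
 * Let the PUBL run the DRAM initialisation sequence (PIR_DRAMINIT, with a
 * DRAM reset for DDR3), bypassing DLL lock and ZQ calibration here, and
 * wait for completion in PGSR.
 */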
static void memory_init(struct rk3288_ddr_publ *publ,
	setbits_le32(&publ->pir,
		     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
		      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
		      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
	while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
	       != (PGSR_IDONE | PGSR_DLDONE))

static void move_to_config_state(struct rk3288_ddr_publ *publ,
				 struct rk3288_ddr_pctl *pctl)
	state = readl(&pctl->stat) & PCTL_STAT_MSK;

		writel(WAKEUP_STATE, &pctl->sctl);
		while ((readl(&pctl->stat) & PCTL_STAT_MSK)
		while ((readl(&publ->pgsr) & PGSR_DLDONE)
		/*
		 * If in a low-power state, we need to wake up first and
		 * then enter the config state.
		 */
		writel(CFG_STATE, &pctl->sctl);
		while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
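
/*
 * Switch a channel between full (32-bit, n == 2) and half (16-bit, n == 1)
 * width: in half-width mode the upper two data bytes (DATX8 2/3) and
 * their DLLs are disabled and the partial-population bits are set in the
 * controller, GRF and memory scheduler; full-width mode undoes this.
 */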
static void set_bandwidth_ratio(const struct chan_info *chan, int channel,
				u32 n, struct rk3288_grf *grf)
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;

		setbits_le32(&pctl->ppcfg, 1);
		rk_setreg(&grf->soc_con0, 1 << (8 + channel));
		setbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data byte disable */
		clrbits_le32(&publ->datx8[2].dxgcr, 1);
		clrbits_le32(&publ->datx8[3].dxgcr, 1);
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);

		clrbits_le32(&pctl->ppcfg, 1);
		rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
		clrbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data byte enable */
		setbits_le32(&publ->datx8[2].dxgcr, 1);
		setbits_le32(&publ->datx8[3].dxgcr, 1);

		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);

		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);

		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);

	setbits_le32(&pctl->dfistcfg0, 1 << 2);
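
/*
 * Run DQS gate training and read-valid training on the populated ranks of
 * a channel. Auto-refresh is disabled while training runs and a few
 * refresh commands are issued afterwards to compensate. Returns a
 * negative value if the PHY reports a training error, 0 otherwise.
 */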
static int data_training(const struct chan_info *chan, int channel,
			 struct rk3288_sdram_params *sdram_params)
	u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	/* disable auto refresh */
	writel(0, &pctl->trefi);

	if (sdram_params->base.dramtype != LPDDR3)
		setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
	rank = sdram_params->ch[channel].rank | 1;
	for (j = 0; j < ARRAY_SIZE(step); j++) {
		/*
		 * Trigger QSTRN and RVTRN,
		 * clear DTDONE status.
		 */
		setbits_le32(&publ->pir, PIR_CLRSR);

		setbits_le32(&publ->pir,
			     PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |

		/* Wait for DTDONE on each echo byte */
		while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
		while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
		if (!(readl(&pctl->ppcfg) & 1)) {
			while ((readl(&publ->datx8[2].dxgsr[0])
			while ((readl(&publ->datx8[3].dxgsr[0])
		if (readl(&publ->pgsr) &
		    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {

	/*
	 * Send a few auto-refresh commands to make up for those lost
	 * during data training.
	 */
	for (i = 0; i < (rank > 1 ? 8 : 4); i++)
		send_command(pctl, rank, REF_CMD, 0);

	if (sdram_params->base.dramtype != LPDDR3)
		clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

	/* resume auto refresh */
	writel(sdram_params->pctl_timing.trefi, &pctl->trefi);
static void move_to_access_state(const struct chan_info *chan)
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	state = readl(&pctl->stat) & PCTL_STAT_MSK;

		if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
		writel(WAKEUP_STATE, &pctl->sctl);
		while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
		while ((readl(&publ->pgsr) & PGSR_DLDONE)
		writel(CFG_STATE, &pctl->sctl);
		while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
		writel(GO_STATE, &pctl->sctl);
		while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
			 struct rk3288_sdram_params *sdram_params)
	struct rk3288_ddr_publ *publ = chan->publ;

	if (sdram_params->ch[chnum].bk == 3)
		clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
		clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);

	writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
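
/*
 * Record the final DRAM configuration: encode DRAM type, channel count and
 * per-channel geometry (rank, column, bank, row, bus width) into SYS_REG2
 * in the PMU so that later boot stages can recover the SDRAM size via
 * rockchip_sdram_size(), program each channel's ddrconf, and set the
 * channel stride in the SGRF.
 */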
static void dram_all_config(const struct dram_info *dram,
			    struct rk3288_sdram_params *sdram_params)
	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
	for (chan = 0; chan < sdram_params->num_channels; chan++) {
		const struct rk3288_sdram_channel *info =
			&sdram_params->ch[chan];

		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);

		dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
	}
	writel(sys_reg, &dram->pmu->sys_reg[2]);
	rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
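
/*
 * Use the data-training result to work out how many ranks are populated
 * and whether the channel is 32-bit or 16-bit wide, then re-train with the
 * adjusted settings. A training failure on rank 0 of channel 0 is fatal;
 * on channel 1 it simply means only one channel is populated.
 */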
static int sdram_rank_bw_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
	int need_trainig = 0;
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_publ *publ = chan->publ;

	if (data_training(chan, channel, sdram_params) < 0) {
		reg = readl(&publ->datx8[0].dxgsr[0]);
		/* Check the result for rank 0 */
		if ((channel == 0) && (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
			debug("data training failed!\n");
		} else if ((channel == 1) &&
			   (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
			sdram_params->num_channels = 1;

		/* Check the result for rank 1 */
		if (reg & DQS_GATE_TRAINING_ERROR_RANK1) {
			sdram_params->ch[channel].rank = 1;
			clrsetbits_le32(&publ->pgcr, 0xF << 18,
					sdram_params->ch[channel].rank << 18);

		reg = readl(&publ->datx8[2].dxgsr[0]);
		if (reg & (1 << 4)) {
			sdram_params->ch[channel].bw = 1;
			set_bandwidth_ratio(chan, channel,
					    sdram_params->ch[channel].bw,

	/* Assume the die bit-width is the same as the chip bit-width */
	sdram_params->ch[channel].dbw = sdram_params->ch[channel].bw;

	    (data_training(chan, channel, sdram_params) < 0)) {
		if (sdram_params->base.dramtype == LPDDR3) {
			ddr_phy_ctl_reset(dram->cru, channel, 1);
			ddr_phy_ctl_reset(dram->cru, channel, 0);
		debug("2nd data training failed!\n");
static int sdram_col_row_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;

	for (col = 11; col >= 9; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE +
			(1 << (col + sdram_params->ch[channel].bw - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
		printf("Col detect error\n");

	sdram_params->ch[channel].col = col;

	move_to_config_state(publ, pctl);
	writel(4, &chan->msch->ddrconf);
	move_to_access_state(chan);

	for (row = 16; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE + (1 << (row + 15 - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
		printf("Row detect error\n");

	sdram_params->ch[channel].cs1_row = row;
	sdram_params->ch[channel].row_3_4 = 0;
	debug("chn %d col %d, row %d\n", channel, col, row);
	sdram_params->ch[channel].cs0_row = row;
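
/*
 * Derive the NIU/memory-scheduler 'ddrconfig' index from the detected
 * geometry of channel 0 by matching it against ddrconf_table[].
 */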
static int sdram_get_niu_config(struct rk3288_sdram_params *sdram_params)
	int i, tmp, size, ret = 0;

	tmp = sdram_params->ch[0].col - 9;
	tmp -= (sdram_params->ch[0].bw == 2) ? 0 : 1;
	tmp |= ((sdram_params->ch[0].cs0_row - 12) << 4);
	size = ARRAY_SIZE(ddrconf_table);
	for (i = 0; i < size; i++)
		if (tmp == ddrconf_table[i])
		printf("NIU config not found\n");

	sdram_params->base.ddrconfig = i;
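
/*
 * Choose the inter-channel address stride setting based on the total
 * capacity computed from the detected geometry.
 */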
static int sdram_get_stride(struct rk3288_sdram_params *sdram_params)
	long cap = sdram_params->num_channels * (1u <<
			(sdram_params->ch[0].cs0_row +
			 sdram_params->ch[0].col +
			 (sdram_params->ch[0].rank - 1) +
			 sdram_params->ch[0].bw +

		printf("could not find correct stride, cap error!\n");

	sdram_params->base.stride = stride;
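
/*
 * Full SDRAM initialisation sequence: set the DDR clock, then for each
 * channel reset and configure the PHY and controller, run the DRAM init
 * and mode-register setup, detect rank/width and column/row geometry via
 * data training and test patterns, and finally pick the NIU config and
 * stride and commit everything with dram_all_config().
 */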
static int sdram_init(struct dram_info *dram,
		      struct rk3288_sdram_params *sdram_params)
	debug("%s start\n", __func__);
	if ((sdram_params->base.dramtype == DDR3 &&
	     sdram_params->base.ddr_freq > 800000000) ||
	    (sdram_params->base.dramtype == LPDDR3 &&
	     sdram_params->base.ddr_freq > 533000000)) {
		debug("SDRAM frequency is too high!");

	debug("ddr clk dpll\n");
	ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
	debug("ret=%d\n", ret);
		debug("Could not set DDR clock\n");

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3288_ddr_pctl *pctl = chan->pctl;
		struct rk3288_ddr_publ *publ = chan->publ;

		/* Map the whole 4GB space to the current channel */
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x17);
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x1a);

		phy_pctrl_reset(dram->cru, publ, channel);
		phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

		dfi_cfg(pctl, sdram_params->base.dramtype);

		pctl_cfg(channel, pctl, sdram_params, dram->grf);

		phy_cfg(chan, channel, sdram_params);

		writel(POWER_UP_START, &pctl->powctl);
		while (!(readl(&pctl->powstat) & POWER_UP_DONE))

		memory_init(publ, sdram_params->base.dramtype);
		move_to_config_state(publ, pctl);

		if (sdram_params->base.dramtype == LPDDR3) {
			send_command(pctl, 3, DESELECT_CMD, 0);
			send_command(pctl, 3, PREA_CMD, 0);
			send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
			send_command_op(pctl, 3, MRS_CMD, 1,
					sdram_params->phy_timing.mr[1]);
			send_command_op(pctl, 3, MRS_CMD, 2,
					sdram_params->phy_timing.mr[2]);
			send_command_op(pctl, 3, MRS_CMD, 3,
					sdram_params->phy_timing.mr[3]);

		/* Use 32-bit bus width for detection */
		sdram_params->ch[channel].bw = 2;
		set_bandwidth_ratio(chan, channel,
				    sdram_params->ch[channel].bw, dram->grf);
		/* Set CS: use n = 3 (both ranks) for detection */
		sdram_params->ch[channel].rank = 2;
		clrsetbits_le32(&publ->pgcr, 0xF << 18,
				(sdram_params->ch[channel].rank | 1) << 18);

		/* DS = 40 ohm, ODT = 155 ohm */
		zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
		       2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
		       0x19 << PD_OUTPUT_SHIFT;
		writel(zqcr, &publ->zq1cr[0]);
		writel(zqcr, &publ->zq0cr[0]);

		if (sdram_params->base.dramtype == LPDDR3) {
			/*
			 * LPDDR2/LPDDR3 need to wait for DAI to
			 * complete, max 10us.
			 */
			send_command_op(pctl,
					sdram_params->ch[channel].rank | 1,
					sdram_params->base.odt ? 3 : 0);

			writel(0, &pctl->mrrcfg0);
			send_command_op(pctl, 1, MRR_CMD, 8, 0);
			if ((readl(&pctl->mrrstat0) & 0x3) != 3) {

		/* Detect the rank and bit-width with data-training */
		sdram_rank_bw_detect(dram, channel, sdram_params);

		if (sdram_params->base.dramtype == LPDDR3) {
			writel(0, &pctl->mrrcfg0);
			for (i = 0; i < 17; i++)
				send_command_op(pctl, 1, MRR_CMD, i, 0);

		writel(15, &chan->msch->ddrconf);
		move_to_access_state(chan);
		/* DDR3 and LPDDR3 always have 8 banks, no need to detect */
		sdram_params->ch[channel].bk = 3;
		/* Detect column and row count */
		ret = sdram_col_row_detect(dram, channel, sdram_params);

	/* Find NIU DDR configuration */
	ret = sdram_get_niu_config(sdram_params);

	/* Find stride setting */
	ret = sdram_get_stride(sdram_params);

	dram_all_config(dram, sdram_params);
	debug("%s done\n", __func__);

	printf("DRAM init failed!\n");

# ifdef CONFIG_ROCKCHIP_FAST_SPL
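/*
 * On veyron boards the CPU voltage is raised in two steps (1.2V then 1.4V)
 * through the RK8xx buck 1 before reprogramming the CPU clock, waiting for
 * the regulator to settle after each step.
 */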
static int veyron_init(struct dram_info *priv)
	struct udevice *pmic;

	ret = uclass_first_device_err(UCLASS_PMIC, &pmic);

	/* Slowly raise to max CPU voltage to prevent overshoot */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1200000);
	udelay(175);	/* Must wait for voltage to stabilize, 2mV/us */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1400000);
	udelay(100);	/* Must wait for voltage to stabilize, 2mV/us */

	rk3288_clk_configure_cpu(priv->cru, priv->grf);

static int setup_sdram(struct udevice *dev)
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params *params = dev_get_platdata(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (priv->is_veyron) {

		ret = veyron_init(priv);

	return sdram_init(priv, params);

static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3288_sdram_params *params = dev_get_platdata(dev);

	/* RK3288 supports dual-channel; default the channel count to 2 */
	params->num_channels = 2;
	ret = dev_read_u32_array(dev, "rockchip,pctl-timing",
				 (u32 *)&params->pctl_timing,
				 sizeof(params->pctl_timing) / sizeof(u32));
		debug("%s: Cannot read rockchip,pctl-timing\n", __func__);

	ret = dev_read_u32_array(dev, "rockchip,phy-timing",
				 (u32 *)&params->phy_timing,
				 sizeof(params->phy_timing) / sizeof(u32));
		debug("%s: Cannot read rockchip,phy-timing\n", __func__);

	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&params->base,
				 sizeof(params->base) / sizeof(u32));
		debug("%s: Cannot read rockchip,sdram-params\n", __func__);

#ifdef CONFIG_ROCKCHIP_FAST_SPL
	struct dram_info *priv = dev_get_priv(dev);

	priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
#endif
	ret = regmap_init_mem(dev, &params->map);

#endif /* CONFIG_SPL_BUILD */

#if CONFIG_IS_ENABLED(OF_PLATDATA)
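/*
 * With OF_PLATDATA the timing/parameter arrays come from the generated
 * dtd_rockchip_rk3288_dmc structure instead of the live device tree; copy
 * them into the runtime sdram_params and set up the register map from the
 * 'reg' property.
 */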
static int conv_of_platdata(struct udevice *dev)
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;

	memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
	       sizeof(plat->pctl_timing));
	memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
	       sizeof(plat->phy_timing));
	memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
	/* RK3288 supports dual-channel; default the channel count to 2 */
	plat->num_channels = 2;
	ret = regmap_init_mem_platdata(dev, of_plat->reg,
				       ARRAY_SIZE(of_plat->reg) / 2,

static int rk3288_dmc_probe(struct udevice *dev)
#ifdef CONFIG_SPL_BUILD
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
	struct udevice *dev_clk;
	struct regmap *map;
	int ret;
#endif
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);
#ifdef CONFIG_SPL_BUILD
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif
	map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
	if (IS_ERR(map))
		return PTR_ERR(map);
	priv->chan[0].msch = regmap_get_range(map, 0);
	priv->chan[1].msch = (struct rk3288_msch *)
			(regmap_get_range(map, 0) + 0x80);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);

	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].publ = regmap_get_range(plat->map, 1);
	priv->chan[1].pctl = regmap_get_range(plat->map, 2);
	priv->chan[1].publ = regmap_get_range(plat->map, 3);

	ret = rockchip_get_clk(&dev_clk);
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
	ret = setup_sdram(dev);

	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
				(phys_addr_t)&priv->pmu->sys_reg[2]);

static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
	struct dram_info *priv = dev_get_priv(dev);

static struct ram_ops rk3288_dmc_ops = {
	.get_info = rk3288_dmc_get_info,
};

static const struct udevice_id rk3288_dmc_ids[] = {
	{ .compatible = "rockchip,rk3288-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3288) = {
	.name = "rockchip_rk3288_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3288_dmc_ids,
	.ops = &rk3288_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
#endif
	.probe = rk3288_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),
#endif
};