/*
 * DDR3 memory setup file for boards based on EXYNOS5
 *
 * Copyright (C) 2012 Samsung Electronics
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
11 #include <asm/arch/clock.h>
12 #include <asm/arch/cpu.h>
13 #include <asm/arch/dmc.h>
14 #include <asm/arch/power.h>
15 #include "common_setup.h"
16 #include "exynos5_setup.h"
17 #include "clock_init.h"
21 #ifdef CONFIG_EXYNOS5250
/*
 * Pulse the LPDDR3 PHY reset control in the clock controller so the DDR
 * PHY restarts from a known state.
 *
 * NOTE(review): the visible order is RESET_OFF followed by RESET; this
 * matches the upstream release-then-assert latch sequence, but confirm
 * against the Exynos5250 user manual before reordering.
 */
22 static void reset_phy_ctrl(void)
24 struct exynos5_clock *clk =
25 (struct exynos5_clock *)samsung_get_base_clock();
/* Both writes hit the lpddr3phy_ctrl register in the CMU block. */
27 writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
28 writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
/*
 * ddr3_mem_ctrl_init() - initialize the DDR3 controller and PHYs (Exynos5250).
 * @mem:   board/memory-part specific timing and configuration values
 * @reset: nonzero requests a PHY reset first (handling not visible in this
 *         excerpt -- presumably calls reset_phy_ctrl(); TODO confirm)
 *
 * Returns 0 on success, or a SETUP_ERR_* code if ZQ calibration or read
 * leveling fails. The register writes below follow the hardware's DDR3
 * bring-up sequence; ordering is significant -- do not reorder.
 */
31 int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
34 struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
35 struct exynos5_dmc *dmc;
/* Map the two DDR PHY register banks and the DMC controller block. */
38 phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
39 phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
41 dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();
/* Same impedance value is applied to CK, CKE, CS and address drivers. */
46 /* Set Impedance Output Driver */
47 val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
48 (mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
49 (mem->impedance << CA_CS_DRVR_DS_OFFSET) |
50 (mem->impedance << CA_ADR_DRVR_DS_OFFSET);
51 writel(val, &phy0_ctrl->phy_con39);
52 writel(val, &phy1_ctrl->phy_con39);
54 /* Set Read Latency and Burst Length for PHY0 and PHY1 */
55 val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
56 (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
57 writel(val, &phy0_ctrl->phy_con42);
58 writel(val, &phy1_ctrl->phy_con42);
/* ZQ output-impedance calibration on both PHYs; abort init on failure. */
61 if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
62 &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
63 return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
/* DQS pull-down setup (field names suggest DQS termination -- verify). */
66 writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
67 writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);
/* Kick off DFI initialization via concontrol (destination register is
 * on a line missing from this excerpt; presumably &dmc->concontrol). */
69 writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
70 | (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
73 update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
/* Per-PHY DQS/DQ offset and tFS values from the timing table. */
76 writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
77 writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);
79 writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
80 writel(mem->phy1_dq, &phy1_ctrl->phy_con6);
82 writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
83 writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);
/* Program DLL control parameters first without the START bit... */
85 val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
86 (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
87 (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
88 (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
89 writel(val, &phy0_ctrl->phy_con12);
90 writel(val, &phy1_ctrl->phy_con12);
/* ...then re-write with START set to begin DLL locking. */
92 /* Start DLL locking */
93 writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
94 &phy0_ctrl->phy_con12);
95 writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
96 &phy1_ctrl->phy_con12);
98 update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
/* Re-write concontrol WITHOUT the DFI_INIT_START bit, i.e. clear it
 * once initialization has been triggered (destination line missing). */
100 writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
103 /* Memory Channel Inteleaving Size */
104 writel(mem->iv_size, &dmc->ivcontrol);
/* Chip configuration: same memconfig for both chips, distinct bases. */
106 writel(mem->memconfig, &dmc->memconfig0);
107 writel(mem->memconfig, &dmc->memconfig1);
108 writel(mem->membaseconfig0, &dmc->membaseconfig0);
109 writel(mem->membaseconfig1, &dmc->membaseconfig1);
111 /* Precharge Configuration */
112 writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
115 /* Power Down mode Configuration */
116 writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
117 mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,
120 /* TimingRow, TimingData, TimingPower and Timingaref
121 * values as per Memory AC parameters
123 writel(mem->timing_ref, &dmc->timingref);
124 writel(mem->timing_row, &dmc->timingrow);
125 writel(mem->timing_data, &dmc->timingdata);
126 writel(mem->timing_power, &dmc->timingpower);
128 /* Send PALL command */
129 dmc_config_prech(mem, &dmc->directcmd);
131 /* Send NOP, MRS and ZQINIT commands */
132 dmc_config_mrs(mem, &dmc->directcmd);
/* Optional read gate leveling: trains the read DQS gate per byte lane. */
134 if (mem->gate_leveling_enable) {
135 val = PHY_CON0_RESET_VAL;
137 writel(val, &phy0_ctrl->phy_con0);
138 writel(val, &phy1_ctrl->phy_con0);
140 val = PHY_CON2_RESET_VAL;
141 val |= INIT_DESKEW_EN;
142 writel(val, &phy0_ctrl->phy_con2);
143 writel(val, &phy1_ctrl->phy_con2);
145 val = PHY_CON0_RESET_VAL;
147 val |= BYTE_RDLVL_EN;
148 writel(val, &phy0_ctrl->phy_con0);
149 writel(val, &phy1_ctrl->phy_con0);
/* phy_con12 for leveling: FORCE+START set, DLL_ON deliberately absent. */
151 val = (mem->ctrl_start_point <<
152 PHY_CON12_CTRL_START_POINT_SHIFT) |
153 (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
154 (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
155 (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
156 (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
157 writel(val, &phy0_ctrl->phy_con12);
158 writel(val, &phy1_ctrl->phy_con12);
160 val = PHY_CON2_RESET_VAL;
161 val |= INIT_DESKEW_EN;
162 val |= RDLVL_GATE_EN;
163 writel(val, &phy0_ctrl->phy_con2);
164 writel(val, &phy1_ctrl->phy_con2);
166 val = PHY_CON0_RESET_VAL;
168 val |= BYTE_RDLVL_EN;
170 writel(val, &phy0_ctrl->phy_con0);
171 writel(val, &phy1_ctrl->phy_con0);
173 val = PHY_CON1_RESET_VAL;
174 val &= ~(CTRL_GATEDURADJ_MASK);
175 writel(val, &phy0_ctrl->phy_con1);
176 writel(val, &phy1_ctrl->phy_con1);
/* Start gate read leveling and poll both channels for completion;
 * `i` is a countdown guard (its init/decrement lines are missing here). */
178 writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
180 while ((readl(&dmc->phystatus) &
181 (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
182 (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
184 * TODO(waihong): Comment on how long this take to
/* Countdown expired without completion -> leveling timed out. */
191 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
192 writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);
/* Clear the DQS pull-downs that were set before leveling. */
194 writel(0, &phy0_ctrl->phy_con14);
195 writel(0, &phy1_ctrl->phy_con14);
/* Restore phy_con12 for normal operation, now with DLL_ON included. */
197 val = (mem->ctrl_start_point <<
198 PHY_CON12_CTRL_START_POINT_SHIFT) |
199 (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
200 (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
201 (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
202 (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
203 (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
204 writel(val, &phy0_ctrl->phy_con12);
205 writel(val, &phy1_ctrl->phy_con12);
207 update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
210 /* Send PALL command */
211 dmc_config_prech(mem, &dmc->directcmd);
213 writel(mem->memcontrol, &dmc->memcontrol);
/* Final concontrol write: auto-refresh enabled, DFI init bit clear. */
215 /* Set DMC Concontrol and enable auto-refresh counter */
216 writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
217 | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
222 #ifdef CONFIG_EXYNOS5420
/*
 * ddr3_mem_ctrl_init() - initialize the DDR3 controllers and PHYs
 * (Exynos5420/5422 "Ares" variant, two DREX channels).
 * @mem:   board/memory-part specific timing and configuration values
 * @reset: nonzero requests a full reset path (exact handling is on lines
 *         not visible in this excerpt -- TODO confirm)
 *
 * Returns 0 on success, or a SETUP_ERR_* code if ZQ calibration or read
 * leveling fails. Sequence ordering is mandated by the hardware; do not
 * reorder register writes.
 */
223 int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
225 struct exynos5420_clock *clk =
226 (struct exynos5420_clock *)samsung_get_base_clock();
227 struct exynos5420_power *power =
228 (struct exynos5420_power *)samsung_get_base_power();
229 struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
230 struct exynos5420_dmc *drex0, *drex1;
231 struct exynos5420_tzasc *tzasc0, *tzasc1;
232 uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
233 uint32_t lock0_info, lock1_info;
/* Map both PHYs, both DREX controllers and both TZASC blocks; the
 * second instance of each is at a fixed offset from the first (offset
 * expressions are on lines missing from this excerpt). */
237 phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
238 phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
240 drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
241 drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
243 tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
244 tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
246 /* Enable PAUSE for DREX */
247 setbits_le32(&clk->pause, ENABLE_BIT);
/* Route BPLL through bypass while switching the CDREX mux, then wait
 * for the mux status to report FOUTBPLL before dropping bypass. */
249 /* Enable BYPASS mode */
250 setbits_le32(&clk->bpll_con1, BYPASS_EN);
252 writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
254 val = readl(&clk->mux_stat_cdrex);
255 val &= BPLL_SEL_MASK;
256 } while (val != FOUTBPLL);
258 clrbits_le32(&clk->bpll_con1, BYPASS_EN);
260 /* Specify the DDR memory type as DDR3 */
261 val = readl(&phy0_ctrl->phy_con0);
262 val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
263 val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
264 writel(val, &phy0_ctrl->phy_con0);
266 val = readl(&phy1_ctrl->phy_con0);
267 val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
268 val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
269 writel(val, &phy1_ctrl->phy_con0);
271 /* Set Read Latency and Burst Length for PHY0 and PHY1 */
272 val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
273 (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
274 writel(val, &phy0_ctrl->phy_con42);
275 writel(val, &phy1_ctrl->phy_con42);
/* Set write-data-enable timing (t_wrdata_en) to the DDR3 value. */
277 val = readl(&phy0_ctrl->phy_con26);
278 val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
279 val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
280 writel(val, &phy0_ctrl->phy_con26);
282 val = readl(&phy1_ctrl->phy_con26);
283 val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
284 val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
285 writel(val, &phy1_ctrl->phy_con26);
288 * Set Driver strength for CK, CKE, CS & CA to 0x7
289 * Set Driver strength for Data Slice 0~3 to 0x7
291 val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
292 (0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
293 val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
294 (0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
295 writel(val, &phy0_ctrl->phy_con39);
296 writel(val, &phy1_ctrl->phy_con39);
/* ZQ impedance calibration; abort on failure, then stop the ZQ clock
 * divider that the calibration helper left enabled. */
299 if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
300 &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
301 return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
303 clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
304 clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);
/* OR the board's DQS pull-down bits into phy_con14 (read-modify-write,
 * unlike the 5250 path which writes the value outright). */
307 val = readl(&phy0_ctrl->phy_con14);
308 val |= mem->phy0_pulld_dqs;
309 writel(val, &phy0_ctrl->phy_con14);
310 val = readl(&phy1_ctrl->phy_con14);
311 val |= mem->phy1_pulld_dqs;
312 writel(val, &phy1_ctrl->phy_con14);
/* Enable memory- and PHY-side termination on both channels. */
314 val = MEM_TERM_EN | PHY_TERM_EN;
315 writel(val, &drex0->phycontrol0);
316 writel(val, &drex1->phycontrol0);
/* Trigger DFI initialization on both DREX channels (the destination
 * registers are on lines missing here; presumably &drexN->concontrol). */
318 writel(mem->concontrol |
319 (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
320 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
322 writel(mem->concontrol |
323 (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
324 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
/* Busy-wait until each channel reports DFI_INIT_COMPLETE, then clear
 * the start bit. */
328 val = readl(&drex0->phystatus);
329 } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
331 val = readl(&drex1->phystatus);
332 } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
334 clrbits_le32(&drex0->concontrol, DFI_INIT_START);
335 clrbits_le32(&drex1->concontrol, DFI_INIT_START);
337 update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
338 update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);
/* Program chip base/mask windows in the TZASC for both chips:
 * chip 0 at 0x2000_0000, chip 1 at 0x6000_0000 (1GB each per the
 * address ranges quoted below). */
342 * 0x2000_0000 ~ 0x5FFF_FFFF
343 * 0x6000_0000 ~ 0x9FFF_FFFF
346 val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
347 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
348 writel(val, &tzasc0->membaseconfig0);
349 writel(val, &tzasc1->membaseconfig0);
352 val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
353 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
354 writel(val, &tzasc0->membaseconfig1);
355 writel(val, &tzasc1->membaseconfig1);
358 * Memory Channel Inteleaving Size
359 * Ares Channel interleaving = 128 bytes
362 writel(mem->memconfig, &tzasc0->memconfig0);
363 writel(mem->memconfig, &tzasc1->memconfig0);
364 writel(mem->memconfig, &tzasc0->memconfig1);
365 writel(mem->memconfig, &tzasc1->memconfig1);
367 /* Precharge Configuration */
368 writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
369 &drex0->prechconfig0);
370 writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
371 &drex1->prechconfig0);
374 * TimingRow, TimingData, TimingPower and Timingaref
375 * values as per Memory AC parameters
377 writel(mem->timing_ref, &drex0->timingref);
378 writel(mem->timing_ref, &drex1->timingref);
379 writel(mem->timing_row, &drex0->timingrow0);
380 writel(mem->timing_row, &drex1->timingrow0);
381 writel(mem->timing_data, &drex0->timingdata0);
382 writel(mem->timing_data, &drex1->timingdata0);
383 writel(mem->timing_power, &drex0->timingpower0);
384 writel(mem->timing_power, &drex1->timingpower0);
388 * Send NOP, MRS and ZQINIT commands
389 * Sending MRS command will reset the DRAM. We should not be
390 * reseting the DRAM after resume, this will lead to memory
391 * corruption as DRAM content is lost after DRAM reset
393 dmc_config_mrs(mem, &drex0->directcmd);
394 dmc_config_mrs(mem, &drex1->directcmd);
398 * Get PHY_CON13 from both phys. Gate CLKM around reading since
399 * PHY_CON13 is glitchy when CLKM is running. We're paranoid and
400 * wait until we get a "fine lock", though a coarse lock is probably
401 * OK (we only use the coarse numbers below). We try to gate the
402 * clock for as short a time as possible in case SDRAM is somehow
403 * sensitive. sdelay(10) in the loop is arbitrary to make sure
404 * there is some time for PHY_CON13 to get updated. In practice
405 * no delay appears to be needed.
/* Bit 0 of gate_bus_cdrex gates PHY0's CLKM, bit 1 gates PHY1's. */
407 val = readl(&clk->gate_bus_cdrex);
409 writel(val & ~0x1, &clk->gate_bus_cdrex);
410 lock0_info = readl(&phy0_ctrl->phy_con13);
411 writel(val, &clk->gate_bus_cdrex);
413 if ((lock0_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
419 writel(val & ~0x2, &clk->gate_bus_cdrex);
420 lock1_info = readl(&phy1_ctrl->phy_con13);
421 writel(val, &clk->gate_bus_cdrex);
423 if ((lock1_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
431 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
432 * pad retention, CKE goes high. This causes memory contents
433 * not to be retained during DRAM initialization. Therfore,
434 * there is a new control register(0x100431e8[28]) which lets us
435 * release pad retention and retain the memory content until the
436 * initialization is complete.
438 writel(PAD_RETENTION_DRAM_COREBLK_VAL,
439 &power->pad_retention_dram_coreblk_option);
/* Poll until pad retention release is acknowledged by the PMU. */
441 val = readl(&power->pad_retention_dram_status);
442 } while (val != 0x1);
445 * CKE PAD retention disables DRAM self-refresh mode.
446 * Send auto refresh command for DRAM refresh.
/* 128 explicit REFA direct commands per configured chip (destination
 * directcmd registers are on lines missing from this excerpt). */
448 for (i = 0; i < 128; i++) {
449 for (chip = 0; chip < mem->chips_to_configure; chip++) {
450 writel(DIRECT_CMD_REFA |
451 (chip << DIRECT_CMD_CHIP_SHIFT),
453 writel(DIRECT_CMD_REFA |
454 (chip << DIRECT_CMD_CHIP_SHIFT),
/* Optional read-gate leveling (trains the read DQS gate). */
460 if (mem->gate_leveling_enable) {
461 writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
462 writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);
464 setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
465 setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);
467 val = PHY_CON2_RESET_VAL;
468 val |= INIT_DESKEW_EN;
469 writel(val, &phy0_ctrl->phy_con2);
470 writel(val, &phy1_ctrl->phy_con2);
472 val = readl(&phy0_ctrl->phy_con1);
473 val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
474 writel(val, &phy0_ctrl->phy_con1);
476 val = readl(&phy1_ctrl->phy_con1);
477 val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
478 writel(val, &phy1_ctrl->phy_con1);
/* Fold each PHY's coarse DLL lock value (from PHY_CON13 above) into
 * phy_con12 with CTRL_DLL_ON cleared -- forced, DLL-off operation
 * using the measured lock code. */
480 n_lock_w_phy0 = (lock0_info & CTRL_LOCK_COARSE_MASK) >> 2;
481 n_lock_r = readl(&phy0_ctrl->phy_con12);
482 n_lock_r &= ~CTRL_DLL_ON;
483 n_lock_r |= n_lock_w_phy0;
484 writel(n_lock_r, &phy0_ctrl->phy_con12);
486 n_lock_w_phy1 = (lock1_info & CTRL_LOCK_COARSE_MASK) >> 2;
487 n_lock_r = readl(&phy1_ctrl->phy_con12);
488 n_lock_r &= ~CTRL_DLL_ON;
489 n_lock_r |= n_lock_w_phy1;
490 writel(n_lock_r, &phy1_ctrl->phy_con12);
/* Direct command 0x4 with bank bits 0x3 -- presumably an MRS-type
 * command entering leveling mode; TODO confirm encoding against the
 * DREX direct-command documentation. */
492 val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
493 for (chip = 0; chip < mem->chips_to_configure; chip++) {
494 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
496 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
500 setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
501 setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);
503 setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
504 setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);
506 val = readl(&phy0_ctrl->phy_con1);
507 val &= ~(CTRL_GATEDURADJ_MASK);
508 writel(val, &phy0_ctrl->phy_con1);
510 val = readl(&phy1_ctrl->phy_con1);
511 val &= ~(CTRL_GATEDURADJ_MASK);
512 writel(val, &phy1_ctrl->phy_con1);
/* Run gate leveling on DREX0; `i` is a countdown timeout guard whose
 * init/decrement lines are missing from this excerpt. */
514 writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
516 while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
517 RDLVL_COMPLETE_CHO) && (i > 0)) {
519 * TODO(waihong): Comment on how long this take to
526 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
527 writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);
/* Same gate-leveling sequence for DREX1. */
529 writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
531 while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
532 RDLVL_COMPLETE_CHO) && (i > 0)) {
534 * TODO(waihong): Comment on how long this take to
541 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
542 writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);
/* Clear the DQS pull-down configuration applied before leveling. */
544 writel(0, &phy0_ctrl->phy_con14);
545 writel(0, &phy1_ctrl->phy_con14);
/* Exit-leveling direct command (bank bits only) to each chip. */
547 val = (0x3 << DIRECT_CMD_BANK_SHIFT);
548 for (chip = 0; chip < mem->chips_to_configure; chip++) {
549 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
551 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
/* Restore phy_con12 baseline plus the per-PHY measured lock offset. */
555 /* Common Settings for Leveling */
556 val = PHY_CON12_RESET_VAL;
557 writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
558 writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);
560 setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
561 setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
564 /* Send PALL command */
565 dmc_config_prech(mem, &drex0->directcmd);
566 dmc_config_prech(mem, &drex1->directcmd);
568 writel(mem->memcontrol, &drex0->memcontrol);
569 writel(mem->memcontrol, &drex1->memcontrol);
572 * Set DMC Concontrol: Enable auto-refresh counter, provide
573 * read data fetch cycles and enable DREX auto set powerdown
574 * for input buffer of I/O in none read memory state.
576 writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
577 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
578 DMC_CONCONTROL_IO_PD_CON(0x2),
580 writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
581 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
582 DMC_CONCONTROL_IO_PD_CON(0x2),
586 * Enable Clock Gating Control for DMC
587 * this saves around 25 mw dmc power as compared to the power
588 * consumption without these bits enabled
590 setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
591 setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);