// SPDX-License-Identifier: GPL-2.0+
/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - https://www.ti.com/
 */

#include <config.h>
#include <log.h>
#include <asm/arch/cpu.h>
#include <asm/arch/ddr_defs.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <asm/emif.h>
#include <linux/delay.h>

/**
 * Base address for EMIF instances
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};

/**
 * Base address for ddr io control instances
 */
static struct ddr_cmdtctrl *ioctrl_reg = {
			(struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};

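/*
 * get_mr()/set_mr() implement the EMIF's indirect LPDDR2/DDR2 mode register
 * access: the MR address and chip-select are programmed into
 * LPDDR2_MODE_REG_CFG and the data is then read or written through
 * LPDDR2_MODE_REG_DATA. On reads each byte of the returned word carries one
 * byte lane's copy of the MR value; get_mr() returns the single byte only
 * when all four lanes agree and otherwise hands back the raw word so the
 * caller can see the mismatch.
 */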
static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
{
	u32 mr;

	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);

	mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
	debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
	if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
	    ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
	    ((mr & 0xff000000) >> 24) == (mr & 0xff))
		return mr & 0xff;
	else
		return mr;
}

static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
{
	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
	writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
}

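/*
 * configure_mr() brings up one LPDDR2 chip-select through its mode
 * registers: wait for the device auto-initialization (DAI) flag in MR0 to
 * clear, then trigger a ZQ calibration via MR10 (0x56 reads as the LPDDR2
 * short-calibration code) and program MR1/MR2. The MR1/MR2 values are
 * speed-grade dependent magic from the TRM/datasheet. The final write
 * repeats MR2 with REFRESH_EN set in the config word so refreshes are
 * re-enabled along with the mode-register write.
 */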
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}

/*
 * Configure EMIF4D5 registers and MR registers. For details about these
 * magic values please see the EMIF registers section of the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
#ifdef CONFIG_AM43XX
	struct prm_device_inst *prm_device =
			(struct prm_device_inst *)PRM_DEVICE_INST;
#endif

	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * For most SoCs these registers won't need to be changed, so only
	 * write to them if someone has explicitly set the corresponding
	 * value in the board's register table.
	 */
	if (regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map,
		       &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map,
		       &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map,
		       &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * starting hardware leveling. It also serves to latch some state
	 * from the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
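	/*
	 * Note: 0x2411 and 0x2011 differ only in bit 10, so the three writes
	 * above amount to pulsing a single control bit in EMIF_IODFT_TLGC;
	 * see the TRM for that bit's exact definition.
	 */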

	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);

	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);

	/* Wait 1ms because of L3 timeout error */
	udelay(1000);

	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
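	/*
	 * Each EMIF configuration register written in this file has a _shdw
	 * (shadow) companion that the controller loads into the functional
	 * register at the next refresh/frequency-change boundary, so both
	 * copies are written together to keep them consistent.
	 */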

#ifdef CONFIG_AM43XX
	/*
	 * Disable EMIF_DEVOFF
	 * -> Cold Boot: This is just rewriting the default register value.
	 * -> RTC Resume: DEVOFF must be disabled before leveling.
	 */
	writel(0, &prm_device->emif_ctrl);
#endif

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/*
		 * Enable full read and write leveling. Wait for the
		 * RDWRLVLFULL_START bit (bit 31) to clear once leveling
		 * completes.
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		       != 0)
			;

		/* Check the timeout register to see if leveling is complete */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* For LPDDR2/DDR2 initialize the chip-selects via their MRs */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}
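
/*
 * config_sdram() below is the plain EMIF4D counterpart of
 * config_sdram_emif4d5(); callers typically choose between the two based on
 * get_emif_rev(), the same check config_ddr_phy() uses further down when
 * selecting the external PHY settings.
 */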

/**
 * Configure SDRAM
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

		/* Trigger initialization */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/* Write proper sdram_ref_ctrl value */
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl,
		       &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

	/* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
	if (regs->ocp_config)
		writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
}

/**
 * Set SDRAM timings
 */
void set_sdram_timings(const struct emif_regs *regs, int nr)
{
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
}

/**
 * Configure EXT PHY registers for software leveling
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}
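	/*
	 * In the EMIF register map each EXT_PHY_CTRL register is immediately
	 * followed by its shadow copy, which is why the destination pointer
	 * advances twice for every source value in the loop above.
	 */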

#ifdef CONFIG_AM43XX
	/*
	 * External PHY registers 6-24 do not change with DDR frequency.
	 * They only need to be set for DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}
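
/*
 * emif_get_ext_phy_ctrl_const_regs() is expected to be provided by the
 * SoC/board support code; it returns the table of frequency-independent
 * external PHY control values (and its size) consumed by the loop above.
 */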

/**
 * Configure EXT PHY registers for hardware leveling
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF. For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	if (regs->emif_ddr_phy_ctlr_1 & 0x00040000) {
		/* PHY_INVERT_CLKOUT = 1 */
		writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
		writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	} else {
		/* PHY_INVERT_CLKOUT = 0 */
		writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
		writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	}
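	/*
	 * The choice above keys off bit 18 (0x00040000) of the board's
	 * emif_ddr_phy_ctlr_1 value, i.e. whether PHY_INVERT_CLKOUT is
	 * enabled, so EXT_PHY_CTRL_1 is programmed to the matching setting.
	 */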

	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x00000077, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x00000077, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}

/**
 * Configure DDR PHY
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we finish
	 * programming EMIF regs and set time between rising edge of
	 * DDR_RESET to rising edge of DDR_CKE to > 500us per memory spec.
	 * We currently hardcode a value based on a max expected frequency
	 * of 400MHz.
	 */
	writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
	       &emif_reg[nr]->emif_sdram_ref_ctrl);

	writel(regs->emif_ddr_phy_ctlr_1,
	       &emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1,
	       &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}

/**
 * Configure DDR CMD control registers
 */
void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
{
	if (!cmd)
		return;

	writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
	writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);

	writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
	writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);

	writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
	writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
}

/**
 * Configure DDR DATA registers
 */
void config_ddr_data(const struct ddr_data *data, int nr)
{
	int i;

	if (!data)
		return;

	for (i = 0; i < DDR_DATA_REGS_NR; i++) {
		writel(data->datardsratio0,
		       &(ddr_data_reg[nr]+i)->dt0rdsratio0);
		writel(data->datawdsratio0,
		       &(ddr_data_reg[nr]+i)->dt0wdsratio0);
		writel(data->datawiratio0,
		       &(ddr_data_reg[nr]+i)->dt0wiratio0);
		writel(data->datagiratio0,
		       &(ddr_data_reg[nr]+i)->dt0giratio0);
		writel(data->datafwsratio0,
		       &(ddr_data_reg[nr]+i)->dt0fwsratio0);
		writel(data->datawrsratio0,
		       &(ddr_data_reg[nr]+i)->dt0wrsratio0);
	}
}
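
/*
 * Going by the field names, the dt0*ratio0 values above are the DDR PHY
 * per-byte-macro slave ratios (read DQS, write DQS, write-leveling init,
 * gate-leveling init, FIFO write-enable and write-data); the same set is
 * applied to every data macro in the loop. How these ratios are derived is
 * covered in the DDR PHY section of the TRM.
 */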

void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
	if (!ioregs)
		return;

	writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
	writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
	writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
	writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
	writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
#ifdef CONFIG_AM43XX
	writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
	writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
	writel(ioregs->emif_sdram_config_ext,
	       &ioctrl_reg->emif_sdram_config_ext);
#endif
}
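
/*
 * Typical call order (an illustrative sketch, not part of this file): the
 * board/SoC-level config_ddr() path generally programs the PHY ratio and IO
 * control registers first and only then brings up the EMIF, roughly:
 *
 *	config_cmd_ctrl(&board_cmd_ctrl, 0);
 *	config_ddr_data(&board_ddr_data, 0);
 *	config_io_ctrl(&board_ioregs);
 *	config_ddr_phy(&board_emif_regs, 0);
 *	set_sdram_timings(&board_emif_regs, 0);
 *	if (get_emif_rev((u32)emif_reg[0]) == EMIF_4D5)
 *		config_sdram_emif4d5(&board_emif_regs, 0);
 *	else
 *		config_sdram(&board_emif_regs, 0);
 *
 * The board_* structures are hypothetical names for the board-specific
 * register tables; see the SoC's config_ddr() helper for the authoritative
 * sequence.
 */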