/*
 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
 *
 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <errno.h>
#include <fdtdec.h>
#include <asm/io.h>
#include <asm/arch/clk.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>

/* Register bitfield defines */
#define PLLCTRL_FBDIV_MASK	0x7f000
#define PLLCTRL_FBDIV_SHIFT	12
#define PLLCTRL_BPFORCE_MASK	(1 << 4)
#define PLLCTRL_PWRDWN_MASK	2
#define PLLCTRL_PWRDWN_SHIFT	1
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0

#define ZYNQ_CLK_MAXDIV		0x3f
#define CLK_CTRL_DIV1_SHIFT	20
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT	4
#define CLK_CTRL_SRCSEL_MASK	(0x3 << CLK_CTRL_SRCSEL_SHIFT)

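/*
 * The DDR clock control register carries two divider fields: DIV2X for the
 * ddr_2x clock and DIV3X for the ddr_3x clock, both fed by the DDR PLL.
 */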
#define CLK_CTRL_DIV2X_SHIFT	26
#define CLK_CTRL_DIV2X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT	20
#define CLK_CTRL_DIV3X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)

DECLARE_GLOBAL_DATA_PTR;

#ifndef CONFIG_SPL_BUILD
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

struct zynq_clk_priv {
	ulong ps_clk_freq;
};

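/* Look up the SLCR control register that belongs to a given clock ID. */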
static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}

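/*
 * The SRCSEL field of a clock control register selects the source PLL.
 * The CPU clock defaults to the ARM PLL, peripheral clocks to the IO PLL.
 */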
static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return ddrpll_clk;
	case 3:
		return iopll_clk;
	case 0 ... 1:
	default:
		return armpll_clk;
	}
}

static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return armpll_clk;
	case 3:
		return ddrpll_clk;
	case 0 ... 1:
	default:
		return iopll_clk;
	}
}

static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_ctrl, reset, pwrdwn, mul, bypass;

	clk_ctrl = readl(zynq_clk_get_register(id));

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
	if (reset || pwrdwn)
		return 0;

	bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
	if (bypass)
		mul = 1;
	else
		mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	return priv->ps_clk_freq * mul;
}

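/* The GEM RX clock can be driven either from MIO pins or from EMIO (PL). */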
#ifndef CONFIG_SPL_BUILD
static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
{
	u32 clk_ctrl, srcsel;

	if (id == gem0_clk)
		clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
	else
		clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);

	srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
	if (srcsel)
		return emio_clk;
	else
		return mio_clk;
}
#endif

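/*
 * CPU clocks share one divider; the additional /2 and /3 steps depend on
 * whether the 6:2:1 or 4:2:1 ratio mode is selected via clk_621_true.
 */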
static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		/* fall through */
	case cpu_6or4x_clk:
	default:
		break;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}
#endif

static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div0, div1;

	clk_ctrl = readl(&slcr_base->dci_clk_ctrl);

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
		zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
}
#endif

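/*
 * Peripheral clocks use one 6-bit divider (DIV0); a few of them, such as
 * the FPGA and CAN clocks, cascade a second divider (DIV1).
 */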
static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0;
	u32 div1 = 1;

	clk_ctrl = readl(zynq_clk_get_register(id));

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

#ifndef CONFIG_SPL_BUILD
	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
#endif

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
			zynq_clk_get_pll_rate(priv, pll), div0), div1);
}

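/* GEM rates can only be reported when the RX clock is sourced from MIO. */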
#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_get_peripheral_rate(priv, id, true);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id == gem0_clk ? 0 : 1);

	return -ENXIO;
}

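/*
 * Brute-force search over both divider fields for the pair that comes
 * closest to the requested rate.
 */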
static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
		ulong pll_rate, u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}

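/*
 * The SLCR registers are write-protected and must be unlocked before the
 * divider fields can be updated.
 */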
static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, ulong rate,
					  bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 *reg;

	reg = zynq_clk_get_register(id);
	clk_ctrl = readl(reg);

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);
	pll_rate = zynq_clk_get_pll_rate(priv, pll);
	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
				&div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		new_rate = DIV_ROUND_CLOSEST(pll_rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	zynq_slcr_unlock();
	writel(clk_ctrl, reg);
	zynq_slcr_lock();

	return new_rate;
}

static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
				   ulong rate)
{
	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_set_peripheral_rate(priv, id, rate, true);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id == gem0_clk ? 0 : 1);

	return -ENXIO;
}
#endif

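/*
 * clk_ops callbacks: dispatch on the clock ID ranges from asm/arch/clk.h.
 * The full set is only built outside of SPL; SPL gets a reduced get_rate
 * and no set_rate.
 */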
#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... smc_aper_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}

static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
#else

static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;

	switch (id) {
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, 0);
	default:
		return -ENXIO;
	}
}
#endif

static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
};

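/*
 * The PS reference clock frequency is taken from the "ps-clk-frequency"
 * device tree property, falling back to the usual 33.333 MHz oscillator.
 */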
static int zynq_clk_probe(struct udevice *dev)
{
	struct zynq_clk_priv *priv = dev_get_priv(dev);

	priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev->of_offset,
					    "ps-clk-frequency", 33333333UL);

	return 0;
}

static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};

U_BOOT_DRIVER(zynq_clk) = {
	.name = "zynq_clk",
	.id = UCLASS_CLK,
	.of_match = zynq_clk_ids,
	.flags = DM_FLAG_PRE_RELOC,
	.ops = &zynq_clk_ops,
	.priv_auto_alloc_size = sizeof(struct zynq_clk_priv),
	.probe = zynq_clk_probe,
};