// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
 *
 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
 */

#include <common.h>
#include <clk.h>
#include <clk-uclass.h>
#include <dm.h>
#include <errno.h>
#include <fdtdec.h>
#include <log.h>
#include <asm/global_data.h>
#include <asm/io.h>
#include <dm/device_compat.h>
#include <asm/arch/clk.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>

/* Register bitfield defines */
#define PLLCTRL_FBDIV_MASK	0x7f000
#define PLLCTRL_FBDIV_SHIFT	12
#define PLLCTRL_BPFORCE_MASK	(1 << 4)
#define PLLCTRL_PWRDWN_MASK	2
#define PLLCTRL_PWRDWN_SHIFT	1
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0

#define ZYNQ_CLK_MAXDIV		0x3f
#define CLK_CTRL_DIV1_SHIFT	20
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT	4
#define CLK_CTRL_SRCSEL_MASK	(0x3 << CLK_CTRL_SRCSEL_SHIFT)

#define CLK_CTRL_DIV2X_SHIFT	26
#define CLK_CTRL_DIV2X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT	20
#define CLK_CTRL_DIV3X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)
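
/*
 * Note: the DIV2X/DIV3X fields above sit in the DDR clock control register
 * (see zynq_clk_get_ddr2x_rate()/zynq_clk_get_ddr3x_rate() below), while
 * SRCSEL/DIV0/DIV1 use the same layout in each peripheral clock control
 * register. All dividers are 6-bit fields, so the usable range is 1..63.
 */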

DECLARE_GLOBAL_DATA_PTR;

#ifndef CONFIG_SPL_BUILD
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

struct zynq_clk_priv {
	ulong ps_clk_freq;
#ifndef CONFIG_SPL_BUILD
	struct clk gem_emio_clk[2];
#endif
};

static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}
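
/*
 * Note: peripheral pairs (SDIO0/1, UART0/1, SPI0/1, CAN0/1) share a single
 * control register, and the debug clock control register doubles as the
 * fallback for any id not listed above.
 */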

static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return ddrpll_clk;
	case 3:
		return iopll_clk;
	case 0 ... 1:
	default:
		return armpll_clk;
	}
}

static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return armpll_clk;
	case 3:
		return ddrpll_clk;
	case 0 ... 1:
	default:
		return iopll_clk;
	}
}

static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_ctrl, reset, pwrdwn, mul, bypass;

	clk_ctrl = readl(zynq_clk_get_register(id));

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
	if (reset || pwrdwn)
		return 0;

	bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
	if (bypass)
		mul = 1;
	else
		mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	return priv->ps_clk_freq * mul;
}
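
/*
 * Worked example (values are illustrative, not taken from this file): with
 * the default 33.333333 MHz PS_CLK and a feedback divider (FBDIV) of 40, the
 * PLL output is 33.333333 MHz * 40 = 1333 MHz. A force-bypassed PLL simply
 * passes PS_CLK through (mul = 1); a PLL in reset or powered down reports 0.
 */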

#ifndef CONFIG_SPL_BUILD
static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
{
	u32 clk_ctrl, srcsel;

	if (id == gem0_clk)
		clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
	else
		clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);

	srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
	if (srcsel)
		return emio_clk;
	else
		return mio_clk;
}
#endif

static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		break;
	default:
		break;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}
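
/*
 * The clk_621_true bit selects the CPU clock ratio scheme: 6:2:1 when set
 * (cpu_2x = cpu_6or4x / 3, cpu_1x = cpu_6or4x / 6) and 4:2:1 when clear
 * (cpu_2x = cpu_6or4x / 2, cpu_1x = cpu_6or4x / 4), which is what the
 * divisor adjustments above implement.
 */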

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}
#endif

static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div0, div1;

	clk_ctrl = readl(&slcr_base->dci_clk_ctrl);

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
			zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
}
#endif

static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0;
	u32 div1 = 1;

	clk_ctrl = readl(zynq_clk_get_register(id));

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

#ifndef CONFIG_SPL_BUILD
	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
#endif

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
			zynq_clk_get_pll_rate(priv, pll), div0), div1);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_get_peripheral_rate(priv, id, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_get_rate(parent);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}

static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
		ulong pll_rate,
		u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}
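
/*
 * The two-divider search above is a brute-force scan of div0 = 1..63 and
 * div1 = 1..31 for the pair whose output is closest to the requested rate;
 * at most ~2000 iterations, which is cheap enough for a boot-time setter.
 */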

static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, ulong rate,
					  bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 *reg;

	reg = zynq_clk_get_register(id);
	clk_ctrl = readl(reg);

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);
	pll_rate = zynq_clk_get_pll_rate(priv, pll);
	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
				&div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		/* the rate actually achieved with this divider */
		new_rate = DIV_ROUND_CLOSEST(pll_rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	zynq_slcr_unlock();
	writel(clk_ctrl, reg);
	zynq_slcr_lock();

	return new_rate;
}

static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
				   ulong rate)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_set_peripheral_rate(priv, id, rate, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_set_rate(parent, rate);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}
#endif

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... swdt_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}

static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
#else
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;

	switch (id) {
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, 0);
	case i2c0_aper_clk ... i2c1_aper_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}
#endif

static int dummy_enable(struct clk *clk)
{
	/*
	 * Add a real implementation if ever needed; by default all clocks
	 * are enabled after power-up, which is the only case supported for
	 * now.
	 */
	return 0;
}

static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
	.enable = dummy_enable,
};

static int zynq_clk_probe(struct udevice *dev)
{
	struct zynq_clk_priv *priv = dev_get_priv(dev);
#ifndef CONFIG_SPL_BUILD
	unsigned int i;
	char name[16];
	int ret;

	for (i = 0; i < 2; i++) {
		sprintf(name, "gem%d_emio_clk", i);
		ret = clk_get_by_name_optional(dev, name,
					       &priv->gem_emio_clk[i]);
		if (ret) {
			dev_err(dev, "failed to get %s clock\n", name);
			return ret;
		}
	}
#endif

	priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev_of_offset(dev),
					    "ps-clk-frequency", 33333333UL);

	return 0;
}
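
/*
 * A minimal sketch of a matching device tree node (the node name, unit
 * address and #clock-cells value are illustrative, not taken from this
 * driver):
 *
 *	clkc: clkc@100 {
 *		compatible = "xlnx,ps7-clkc";
 *		ps-clk-frequency = <33333333>;
 *		#clock-cells = <1>;
 *	};
 *
 * The probe above reads "ps-clk-frequency" (defaulting to 33.333333 MHz) and
 * optionally looks up "gem0_emio_clk"/"gem1_emio_clk" by clock name.
 */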

static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};

U_BOOT_DRIVER(zynq_clk) = {
	.name = "zynq_clk",
	.id = UCLASS_CLK,
	.of_match = zynq_clk_ids,
	.ops = &zynq_clk_ops,
	.priv_auto = sizeof(struct zynq_clk_priv),
	.probe = zynq_clk_probe,
};