1 // SPDX-License-Identifier: GPL-2.0
3 * MediaTek common clock driver
5 * Copyright (C) 2018 MediaTek Inc.
6 * Author: Ryder Lee <ryder.lee@mediatek.com>
10 #include <clk-uclass.h>
14 #include <linux/bitops.h>
15 #include <linux/delay.h>
/* PLL CON0/power-register bit definitions (per-PLL control) */
22 #define CON0_BASE_EN BIT(0)
23 #define CON0_PWR_ON BIT(0)
24 #define CON0_ISO_EN BIT(1)
/* strobe bit telling the PLL to latch a newly written PCW value */
25 #define CON1_PCW_CHG BIT(31)
/* 3-bit power-of-two post-divider field; default 7 integer bits in PCW */
27 #define POSTDIV_MASK 0x7
28 #define INTEGER_BITS 7
30 /* scpsys clock off control */
31 #define CLK_SCP_CFG0 0x200
32 #define CLK_SCP_CFG1 0x204
33 #define SCP_ARMCK_OFF_EN GENMASK(9, 0)
34 #define SCP_AXICK_DCM_DIS_EN BIT(0)
35 #define SCP_AXICK_26M_SEL_EN BIT(4)
37 /* shared functions */
40 * In case the rate change propagation to parent clocks is undesirable,
41 * this function is recursively called to find the parent to calculate
42 * the accurate frequency.
/*
 * Return the rate of the parent clock identified by @id by building a
 * temporary struct clk handle and delegating to the clk uclass.
 * NOTE(review): source appears truncated here — the trailing parameter,
 * braces and any alternate parent-lookup path are not visible; confirm
 * against the complete file.
 */
44 static ulong mtk_clk_find_parent_rate(struct clk *clk, int id,
47 struct clk parent = { .id = id, };
/* reuse the same udevice as @clk for the parent handle */
52 parent.dev = clk->dev;
54 return clk_get_rate(&parent);
/*
 * Reparent a composite mux: find @parent's index in mux->parent[], then
 * program the selector field either via set/clr/update registers
 * (CLK_MUX_SETCLR_UPD muxes) or via read-modify-write of the mux register.
 * NOTE(review): lines appear missing (declarations, braces, return paths).
 */
57 static int mtk_clk_mux_set_parent(void __iomem *base, u32 parent,
58 const struct mtk_composite *mux)
/* linear search for the requested parent clock id */
62 while (mux->parent[index] != parent)
63 if (++index == mux->num_parents)
/* set/clr/update style: clear the whole field, write the new index */
66 if (mux->flags & CLK_MUX_SETCLR_UPD) {
67 val = (mux->mux_mask << mux->mux_shift);
68 writel(val, base + mux->mux_clr_reg);
70 val = (index << mux->mux_shift);
71 writel(val, base + mux->mux_set_reg);
/* upd_shift < 0 marks muxes without an update strobe bit */
73 if (mux->upd_shift >= 0)
74 writel(BIT(mux->upd_shift), base + mux->upd_reg);
76 /* switch mux to a select parent */
77 val = readl(base + mux->mux_reg);
78 val &= ~(mux->mux_mask << mux->mux_shift);
80 val |= index << mux->mux_shift;
81 writel(val, base + mux->mux_reg);
87 /* apmixedsys functions */
/*
 * Compute the PLL output rate from input rate @fin, the raw @pcw divider
 * value and the @postdiv divider: roughly fin * pcw / 2^pcwfbits / postdiv,
 * with the final division rounded up.
 * NOTE(review): intermediate lines (declarations, shift of vco by the
 * fractional bit count) are not visible in this chunk.
 */
89 static unsigned long __mtk_pll_recalc_rate(const struct mtk_pll_data *pll,
90 u32 fin, u32 pcw, int postdiv)
92 int pcwbits = pll->pcwbits;
98 /* The fractional part of the PLL divider. */
99 ibits = pll->pcwibits ? pll->pcwibits : INTEGER_BITS;
100 pcwfbits = pcwbits > ibits ? pcwbits - ibits : 0;
/* 64-bit product to avoid overflow before the fractional shift */
102 vco = (u64)fin * pcw;
104 if (pcwfbits && (vco & GENMASK(pcwfbits - 1, 0)))
/* round the post-division up so we never report a rate below target */
112 return ((unsigned long)vco + postdiv - 1) / postdiv;
116 * MediaTek PLLs are configured through their pcw value. The pcw value
117 * describes a divider in the PLL feedback loop which consists of 7 bits
118 * for the integer part and the remaining bits (if present) for the
119 * fractional part. Also they have a 3 bit power-of-two post divider.
/*
 * Write a new @pcw / @postdiv pair into the PLL's registers. When postdiv
 * and pcw live in different registers they are written separately; when a
 * pcw_chg register exists, the change is latched through it.
 * NOTE(review): several lines (declarations, else-branches, delays) are
 * not visible in this chunk.
 */
121 static void mtk_pll_set_rate_regs(struct clk *clk, u32 pcw, int postdiv)
123 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
124 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* encode the power-of-two postdiv as its log2 (ffs(postdiv) - 1) */
128 val = readl(priv->base + pll->pd_reg);
129 val &= ~(POSTDIV_MASK << pll->pd_shift);
130 val |= (ffs(postdiv) - 1) << pll->pd_shift;
132 /* postdiv and pcw need to set at the same time if on same register */
133 if (pll->pd_reg != pll->pcw_reg) {
134 writel(val, priv->base + pll->pd_reg);
135 val = readl(priv->base + pll->pcw_reg);
/* replace the PCW field, preserving neighbouring bits */
139 val &= ~GENMASK(pll->pcw_shift + pll->pcwbits - 1, pll->pcw_shift);
140 val |= pcw << pll->pcw_shift;
/* latch the new PCW through the dedicated change register when present */
142 if (pll->pcw_chg_reg) {
143 chg = readl(priv->base + pll->pcw_chg_reg);
145 writel(val, priv->base + pll->pcw_reg);
146 writel(chg, priv->base + pll->pcw_chg_reg);
149 writel(val, priv->base + pll->pcw_reg);
156 * mtk_pll_calc_values - calculate good values for a given input frequency.
158 * @pcw: The pcw value (output)
159 * @postdiv: The post divider (output)
160 * @freq: The desired target frequency
/*
 * Derive a (pcw, postdiv) pair for target rate @freq: pick the smallest
 * power-of-two postdiv keeping the VCO at or above fmin, then compute
 * pcw = freq * postdiv / xtal_rate scaled by 2^pcwfbits.
 * NOTE(review): lines clamping freq to fmax, assigning *postdiv/*pcw and
 * loop internals are not visible in this chunk.
 */
162 static void mtk_pll_calc_values(struct clk *clk, u32 *pcw, u32 *postdiv,
165 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
166 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* default minimum VCO frequency is 1 GHz when the PLL gives none */
167 unsigned long fmin = pll->fmin ? pll->fmin : 1000 * MHZ;
172 if (freq > pll->fmax)
/* postdiv is a 3-bit power of two, so at most 5 candidate values here */
175 for (val = 0; val < 5; val++) {
177 if ((u64)freq * *postdiv >= fmin)
181 /* _pcw = freq * postdiv / xtal_rate * 2^pcwfbits */
182 ibits = pll->pcwibits ? pll->pcwibits : INTEGER_BITS;
183 _pcw = ((u64)freq << val) << (pll->pcwbits - ibits);
184 do_div(_pcw, priv->tree->xtal2_rate);
/*
 * clk_ops .set_rate for apmixedsys PLLs: compute a (pcw, postdiv) pair for
 * @rate and program it into the PLL registers.
 * NOTE(review): declarations, braces and the return value line are not
 * visible in this chunk.
 */
189 static ulong mtk_apmixedsys_set_rate(struct clk *clk, ulong rate)
194 mtk_pll_calc_values(clk, &pcw, &postdiv, rate);
195 mtk_pll_set_rate_regs(clk, pcw, postdiv);
/*
 * clk_ops .get_rate for apmixedsys PLLs: read back postdiv and pcw from
 * hardware and recompute the output rate from the xtal2 reference.
 */
200 static ulong mtk_apmixedsys_get_rate(struct clk *clk)
202 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
203 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* hardware stores log2(postdiv); convert back to the divider value */
207 postdiv = (readl(priv->base + pll->pd_reg) >> pll->pd_shift) &
209 postdiv = 1 << postdiv;
211 pcw = readl(priv->base + pll->pcw_reg) >> pll->pcw_shift;
212 pcw &= GENMASK(pll->pcwbits - 1, 0);
214 return __mtk_pll_recalc_rate(pll, priv->tree->xtal2_rate,
/*
 * clk_ops .enable for apmixedsys PLLs. Power-up sequence: assert PWR_ON,
 * release isolation, set the base-enable bit in CON0, then (after the PLL
 * settles) release the reset bar for PLLs that have one.
 * NOTE(review): the delays between steps, the CON0_BASE_EN set and the
 * return are not visible in this chunk.
 */
218 static int mtk_apmixedsys_enable(struct clk *clk)
220 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
221 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* step 1: switch the PLL power on */
224 r = readl(priv->base + pll->pwr_reg) | CON0_PWR_ON;
225 writel(r, priv->base + pll->pwr_reg);
/* step 2: lift output isolation */
228 r = readl(priv->base + pll->pwr_reg) & ~CON0_ISO_EN;
229 writel(r, priv->base + pll->pwr_reg);
/* step 3: enable the PLL proper via CON0 */
232 r = readl(priv->base + pll->reg + REG_CON0);
234 writel(r, priv->base + pll->reg + REG_CON0);
/* step 4: release the reset bar once the PLL is running */
238 if (pll->flags & HAVE_RST_BAR) {
239 r = readl(priv->base + pll->reg + REG_CON0);
240 r |= pll->rst_bar_mask;
241 writel(r, priv->base + pll->reg + REG_CON0);
/*
 * clk_ops .disable for apmixedsys PLLs — exact reverse of the enable
 * sequence: assert the reset bar, clear the CON0 enable, re-assert
 * isolation, then drop power.
 * NOTE(review): the CON0 enable-bit clear and the return are not visible
 * in this chunk.
 */
247 static int mtk_apmixedsys_disable(struct clk *clk)
249 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
250 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* step 1: re-assert the reset bar where the PLL has one */
253 if (pll->flags & HAVE_RST_BAR) {
254 r = readl(priv->base + pll->reg + REG_CON0);
255 r &= ~pll->rst_bar_mask;
256 writel(r, priv->base + pll->reg + REG_CON0);
/* step 2: disable the PLL via CON0 */
259 r = readl(priv->base + pll->reg + REG_CON0);
261 writel(r, priv->base + pll->reg + REG_CON0);
/* step 3: isolate the output */
263 r = readl(priv->base + pll->pwr_reg) | CON0_ISO_EN;
264 writel(r, priv->base + pll->pwr_reg);
/* step 4: power the PLL down */
266 r = readl(priv->base + pll->pwr_reg) & ~CON0_PWR_ON;
267 writel(r, priv->base + pll->pwr_reg);
272 /* topckgen functions */
/*
 * Apply a fixed factor to @parent_rate: rate = parent_rate * mult / div,
 * using 64-bit math for the product.
 */
274 static ulong mtk_factor_recalc_rate(const struct mtk_fixed_factor *fdiv,
277 u64 rate = parent_rate * fdiv->mult;
279 do_div(rate, fdiv->div);
/*
 * Rate of a topckgen fixed-divider clock: resolve the parent rate based on
 * which provider (apmixedsys / topckgen / xtal fallback) the parent lives
 * in, then scale it by the fixed factor.
 * NOTE(review): break statements and the default case label are not
 * visible in this chunk.
 */
284 static ulong mtk_topckgen_get_factor_rate(struct clk *clk, u32 off)
286 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
287 const struct mtk_fixed_factor *fdiv = &priv->tree->fdivs[off];
290 switch (fdiv->flags & CLK_PARENT_MASK) {
/* parent is a PLL owned by the apmixedsys provider */
291 case CLK_PARENT_APMIXED:
292 rate = mtk_clk_find_parent_rate(clk, fdiv->parent,
/* parent is another clock inside this topckgen unit */
295 case CLK_PARENT_TOPCKGEN:
296 rate = mtk_clk_find_parent_rate(clk, fdiv->parent, NULL);
/* fallback: crystal oscillator rate from the static tree */
300 rate = priv->tree->xtal_rate;
303 return mtk_factor_recalc_rate(fdiv, rate);
/*
 * Rate of an infracfg fixed-divider clock: parent is looked up either in
 * topckgen or locally, then the fixed factor is applied.
 * NOTE(review): break statements and the default case label are not
 * visible in this chunk.
 */
306 static ulong mtk_infrasys_get_factor_rate(struct clk *clk, u32 off)
308 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
309 const struct mtk_fixed_factor *fdiv = &priv->tree->fdivs[off];
312 switch (fdiv->flags & CLK_PARENT_MASK) {
313 case CLK_PARENT_TOPCKGEN:
314 rate = mtk_clk_find_parent_rate(clk, fdiv->parent,
/* default: parent clock provided by this same device */
318 rate = mtk_clk_find_parent_rate(clk, fdiv->parent, NULL);
321 return mtk_factor_recalc_rate(fdiv, rate);
/*
 * Rate of a topckgen mux: read the selected parent index from hardware and
 * resolve that parent's rate; the xtal rate is returned for CLK_XTAL
 * parents unless CLK_BYPASS_XTAL asks for a real lookup.
 * NOTE(review): the return statements inside the switch and the default
 * case are not fully visible in this chunk.
 */
324 static ulong mtk_topckgen_get_mux_rate(struct clk *clk, u32 off)
326 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
327 const struct mtk_composite *mux = &priv->tree->muxes[off];
/* extract the currently selected parent index from the mux register */
330 index = readl(priv->base + mux->mux_reg);
331 index &= mux->mux_mask << mux->mux_shift;
332 index = index >> mux->mux_shift;
334 if (mux->parent[index] > 0 ||
335 (mux->parent[index] == CLK_XTAL &&
336 priv->tree->flags & CLK_BYPASS_XTAL)) {
337 switch (mux->flags & CLK_PARENT_MASK) {
338 case CLK_PARENT_APMIXED:
339 return mtk_clk_find_parent_rate(clk, mux->parent[index],
343 return mtk_clk_find_parent_rate(clk, mux->parent[index],
/* CLK_XTAL parent (and no bypass): report the crystal rate */
349 return priv->tree->xtal_rate;
/*
 * Rate of an infracfg mux: read the selected parent index from hardware
 * and resolve the parent's rate via topckgen or the local provider.
 * NOTE(review): the tail of the function (fallback return) is not visible
 * in this chunk.
 */
352 static ulong mtk_infrasys_get_mux_rate(struct clk *clk, u32 off)
354 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
355 const struct mtk_composite *mux = &priv->tree->muxes[off];
/* extract the currently selected parent index from the mux register */
358 index = readl(priv->base + mux->mux_reg);
359 index &= mux->mux_mask << mux->mux_shift;
360 index = index >> mux->mux_shift;
362 if (mux->parent[index] > 0 ||
363 (mux->parent[index] == CLK_XTAL &&
364 priv->tree->flags & CLK_BYPASS_XTAL)) {
365 switch (mux->flags & CLK_PARENT_MASK) {
366 case CLK_PARENT_TOPCKGEN:
367 return mtk_clk_find_parent_rate(clk, mux->parent[index],
371 return mtk_clk_find_parent_rate(clk, mux->parent[index],
/*
 * clk_ops .get_rate for topckgen: dispatch on the clk id range —
 * fixed clocks below fdivs_offs, fixed dividers below muxes_offs,
 * muxes above that.
 */
379 static ulong mtk_topckgen_get_rate(struct clk *clk)
381 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
383 if (clk->id < priv->tree->fdivs_offs)
384 return priv->tree->fclks[clk->id].rate;
385 else if (clk->id < priv->tree->muxes_offs)
386 return mtk_topckgen_get_factor_rate(clk, clk->id -
387 priv->tree->fdivs_offs);
389 return mtk_topckgen_get_mux_rate(clk, clk->id -
390 priv->tree->muxes_offs);
/*
 * clk_ops .get_rate for infracfg: same id-range dispatch as topckgen
 * (fixed clocks / fixed dividers / muxes), accumulating into @rate.
 * NOTE(review): the final return of @rate is not visible in this chunk.
 */
393 static ulong mtk_infrasys_get_rate(struct clk *clk)
395 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
399 if (clk->id < priv->tree->fdivs_offs) {
400 rate = priv->tree->fclks[clk->id].rate;
401 } else if (clk->id < priv->tree->muxes_offs) {
402 rate = mtk_infrasys_get_factor_rate(clk, clk->id -
403 priv->tree->fdivs_offs);
405 rate = mtk_infrasys_get_mux_rate(clk, clk->id -
406 priv->tree->muxes_offs);
/*
 * clk_ops .enable for composite muxes: ungate the clock (via clr register
 * for set/clr/update muxes, otherwise by clearing the gate bit in place)
 * and, for CLK_DOMAIN_SCPSYS clocks, program the scpsys clock-off control.
 * NOTE(review): early returns for ids below muxes_offs / gate-less muxes
 * and the final return are not visible in this chunk.
 */
412 static int mtk_clk_mux_enable(struct clk *clk)
414 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
415 const struct mtk_composite *mux;
/* ids below muxes_offs are not muxes — nothing to gate */
418 if (clk->id < priv->tree->muxes_offs)
421 mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
/* gate_shift < 0 marks muxes without a gate bit */
422 if (mux->gate_shift < 0)
425 /* enable clock gate */
426 if (mux->flags & CLK_MUX_SETCLR_UPD) {
/* set/clr style: writing the bit to the clr register ungates */
427 val = BIT(mux->gate_shift);
428 writel(val, priv->base + mux->mux_clr_reg);
430 val = readl(priv->base + mux->gate_reg);
431 val &= ~BIT(mux->gate_shift);
432 writel(val, priv->base + mux->gate_reg);
435 if (mux->flags & CLK_DOMAIN_SCPSYS) {
436 /* enable scpsys clock off control */
437 writel(SCP_ARMCK_OFF_EN, priv->base + CLK_SCP_CFG0);
438 writel(SCP_AXICK_DCM_DIS_EN | SCP_AXICK_26M_SEL_EN,
439 priv->base + CLK_SCP_CFG1);
/*
 * clk_ops .disable for composite muxes: mirror of mtk_clk_mux_enable —
 * gate the clock via the set register (set/clr/update muxes) or by
 * setting the gate bit in place.
 * NOTE(review): early returns and the final return are not visible in
 * this chunk.
 */
445 static int mtk_clk_mux_disable(struct clk *clk)
447 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
448 const struct mtk_composite *mux;
/* ids below muxes_offs are not muxes — nothing to gate */
451 if (clk->id < priv->tree->muxes_offs)
454 mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
/* gate_shift < 0 marks muxes without a gate bit */
455 if (mux->gate_shift < 0)
458 /* disable clock gate */
459 if (mux->flags & CLK_MUX_SETCLR_UPD) {
/* set/clr style: writing the bit to the set register gates */
460 val = BIT(mux->gate_shift);
461 writel(val, priv->base + mux->mux_set_reg);
463 val = readl(priv->base + mux->gate_reg);
464 val |= BIT(mux->gate_shift);
465 writel(val, priv->base + mux->gate_reg);
/*
 * clk_ops .set_parent shared by topckgen/infracfg: delegate to the mux
 * programming helper after translating the clk id to a mux table index.
 * NOTE(review): the early-return body for non-mux ids is not visible in
 * this chunk.
 */
471 static int mtk_common_clk_set_parent(struct clk *clk, struct clk *parent)
473 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
/* only ids at or above muxes_offs have a programmable parent */
475 if (clk->id < priv->tree->muxes_offs)
478 return mtk_clk_mux_set_parent(priv->base, parent->id,
479 &priv->tree->muxes[clk->id - priv->tree->muxes_offs]);
/*
 * clk_ops .enable for simple gates. The gate flags select the register
 * scheme: dedicated set/clr registers (normal or inverted polarity) or a
 * single status register updated read-modify-write.
 * NOTE(review): break statements, the default case and the return are not
 * visible in this chunk.
 */
484 static int mtk_clk_gate_enable(struct clk *clk)
486 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
487 const struct mtk_gate *gate = &priv->gates[clk->id];
488 u32 bit = BIT(gate->shift);
490 switch (gate->flags & CLK_GATE_MASK) {
/* clr register ungates (gate bit is active-high) */
491 case CLK_GATE_SETCLR:
492 writel(bit, priv->base + gate->regs->clr_ofs);
/* inverted polarity: set register ungates */
494 case CLK_GATE_SETCLR_INV:
495 writel(bit, priv->base + gate->regs->set_ofs);
/* no set/clr pair: clear the bit in the status register */
497 case CLK_GATE_NO_SETCLR:
498 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
/* no set/clr pair, inverted: set the bit in the status register */
500 case CLK_GATE_NO_SETCLR_INV:
501 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
/*
 * clk_ops .disable for simple gates — exact mirror of mtk_clk_gate_enable
 * for each register scheme.
 * NOTE(review): break statements, the default case and the return are not
 * visible in this chunk.
 */
511 static int mtk_clk_gate_disable(struct clk *clk)
513 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
514 const struct mtk_gate *gate = &priv->gates[clk->id];
515 u32 bit = BIT(gate->shift);
517 switch (gate->flags & CLK_GATE_MASK) {
/* set register gates (gate bit is active-high) */
518 case CLK_GATE_SETCLR:
519 writel(bit, priv->base + gate->regs->set_ofs);
/* inverted polarity: clr register gates */
521 case CLK_GATE_SETCLR_INV:
522 writel(bit, priv->base + gate->regs->clr_ofs);
/* no set/clr pair: set the bit in the status register */
524 case CLK_GATE_NO_SETCLR:
525 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
/* no set/clr pair, inverted: clear the bit in the status register */
527 case CLK_GATE_NO_SETCLR_INV:
528 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
/*
 * clk_ops .get_rate for gates: a gate never changes frequency, so simply
 * report the rate of its parent clock (resolved via the parent provider
 * stored in priv->parent).
 */
538 static ulong mtk_clk_gate_get_rate(struct clk *clk)
540 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
541 const struct mtk_gate *gate = &priv->gates[clk->id];
543 return mtk_clk_find_parent_rate(clk, gate->parent, priv->parent);
/* clk_ops for the apmixedsys (PLL) provider */
546 const struct clk_ops mtk_clk_apmixedsys_ops = {
547 .enable = mtk_apmixedsys_enable,
548 .disable = mtk_apmixedsys_disable,
549 .set_rate = mtk_apmixedsys_set_rate,
550 .get_rate = mtk_apmixedsys_get_rate,
/* clk_ops for the topckgen (top clock generator) provider */
553 const struct clk_ops mtk_clk_topckgen_ops = {
554 .enable = mtk_clk_mux_enable,
555 .disable = mtk_clk_mux_disable,
556 .get_rate = mtk_topckgen_get_rate,
557 .set_parent = mtk_common_clk_set_parent,
/* clk_ops for the infracfg provider (shares mux enable/disable helpers) */
560 const struct clk_ops mtk_clk_infrasys_ops = {
561 .enable = mtk_clk_mux_enable,
562 .disable = mtk_clk_mux_disable,
563 .get_rate = mtk_infrasys_get_rate,
564 .set_parent = mtk_common_clk_set_parent,
/* clk_ops for simple clock-gate providers */
567 const struct clk_ops mtk_clk_gate_ops = {
568 .enable = mtk_clk_gate_enable,
569 .disable = mtk_clk_gate_disable,
570 .get_rate = mtk_clk_gate_get_rate,
/*
 * Common probe helper for mux/factor clock providers: map the register
 * base and resolve the parent provider — preferring an explicit
 * "clock-parent" phandle, falling back to the apmixedsys driver instance.
 * NOTE(review): error-handling lines, the tree assignment and the return
 * are not visible in this chunk.
 */
573 int mtk_common_clk_init(struct udevice *dev,
574 const struct mtk_clk_tree *tree)
576 struct mtk_clk_priv *priv = dev_get_priv(dev);
577 struct udevice *parent;
580 priv->base = dev_read_addr_ptr(dev);
/* optional DT phandle overrides the default parent provider */
584 ret = uclass_get_device_by_phandle(UCLASS_CLK, dev, "clock-parent", &parent);
585 if (ret || !parent) {
586 ret = uclass_get_device_by_driver(UCLASS_CLK,
587 DM_DRIVER_GET(mtk_clk_apmixedsys), &parent);
592 priv->parent = parent;
598 int mtk_common_clk_gate_init(struct udevice *dev,
599 const struct mtk_clk_tree *tree,
600 const struct mtk_gate *gates)
602 struct mtk_cg_priv *priv = dev_get_priv(dev);
603 struct udevice *parent;
606 priv->base = dev_read_addr_ptr(dev);
610 ret = uclass_get_device_by_phandle(UCLASS_CLK, dev, "clock-parent", &parent);
611 if (ret || !parent) {
612 ret = uclass_get_device_by_driver(UCLASS_CLK,
613 DM_DRIVER_GET(mtk_clk_topckgen), &parent);
618 priv->parent = parent;