	struct clk *iface_clk;
	struct clk *core_clk;
	struct clk *phy_clk;
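+	/* optional clocks, left NULL by devm_clk_get_optional() when absent */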
+	struct clk *aux_clk;
+	struct clk *ref_clk;
	struct reset_control *pci_reset;
	struct reset_control *axi_reset;
	struct reset_control *ahb_reset;
	if (IS_ERR(res->phy_clk))
		return PTR_ERR(res->phy_clk);
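+	/*
+	 * devm_clk_get_optional() returns NULL (not an error) when the clock
+	 * is not described for this platform, so probe still succeeds without
+	 * "aux"/"ref"; real failures such as -EPROBE_DEFER are still returned.
+	 */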
+	res->aux_clk = devm_clk_get_optional(dev, "aux");
+	if (IS_ERR(res->aux_clk))
+		return PTR_ERR(res->aux_clk);
+
+	res->ref_clk = devm_clk_get_optional(dev, "ref");
+	if (IS_ERR(res->ref_clk))
+		return PTR_ERR(res->ref_clk);
+
	res->pci_reset = devm_reset_control_get_exclusive(dev, "pci");
	if (IS_ERR(res->pci_reset))
		return PTR_ERR(res->pci_reset);
	clk_disable_unprepare(res->iface_clk);
	clk_disable_unprepare(res->core_clk);
	clk_disable_unprepare(res->phy_clk);
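+	/* clk_disable_unprepare() is a no-op when an optional clock is NULL */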
+	clk_disable_unprepare(res->aux_clk);
+	clk_disable_unprepare(res->ref_clk);
	regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies);
}
		goto err_assert_ahb;
	}
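+	/* Bring the clocks up in order: core, phy, then the optional aux/ref */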
+	ret = clk_prepare_enable(res->core_clk);
+	if (ret) {
+		dev_err(dev, "cannot prepare/enable core clock\n");
+		goto err_clk_core;
+	}
+
	ret = clk_prepare_enable(res->phy_clk);
	if (ret) {
		dev_err(dev, "cannot prepare/enable phy clock\n");
		goto err_clk_phy;
	}
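+	/* clk_prepare_enable() returns 0 for a NULL optional clock */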
-	ret = clk_prepare_enable(res->core_clk);
+	ret = clk_prepare_enable(res->aux_clk);
	if (ret) {
-		dev_err(dev, "cannot prepare/enable core clock\n");
-		goto err_clk_core;
+		dev_err(dev, "cannot prepare/enable aux clock\n");
+		goto err_clk_aux;
+	}
+
+	ret = clk_prepare_enable(res->ref_clk);
+	if (ret) {
+		dev_err(dev, "cannot prepare/enable ref clock\n");
+		goto err_clk_ref;
	}
	ret = reset_control_deassert(res->ahb_reset);
	return 0;
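+	/* Unwind mirrors bring-up: each label falls through, releasing one more clock */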
err_deassert_ahb:
-	clk_disable_unprepare(res->core_clk);
-err_clk_core:
+	clk_disable_unprepare(res->ref_clk);
+err_clk_ref:
+	clk_disable_unprepare(res->aux_clk);
+err_clk_aux:
	clk_disable_unprepare(res->phy_clk);
err_clk_phy:
+	clk_disable_unprepare(res->core_clk);
+err_clk_core:
	clk_disable_unprepare(res->iface_clk);
err_assert_ahb:
	regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies);