Author | Tokens | Token Proportion | Commits | Commit Proportion |
---|---|---|---|---|
Prashant Gaikwad | 1145 | 45.89% | 1 | 2.94% |
Boris Brezillon | 226 | 9.06% | 3 | 8.82% |
Stephen Boyd | 201 | 8.06% | 5 | 14.71% |
Michael Walle | 197 | 7.90% | 2 | 5.88% |
Michael Turquette | 165 | 6.61% | 6 | 17.65% |
Emilio López | 134 | 5.37% | 1 | 2.94% |
Finley Xiao | 103 | 4.13% | 1 | 2.94% |
Martin Blumenstingl | 79 | 3.17% | 1 | 2.94% |
Maxime Ripard | 71 | 2.85% | 2 | 5.88% |
Russell King | 69 | 2.77% | 2 | 5.88% |
Javier Martinez Canillas | 44 | 1.76% | 1 | 2.94% |
Manivannan Sadhasivam | 28 | 1.12% | 2 | 5.88% |
Alexander Stein | 8 | 0.32% | 1 | 2.94% |
Heiko Stübner | 6 | 0.24% | 1 | 2.94% |
Miles Chen | 5 | 0.20% | 1 | 2.94% |
Anson Huang | 5 | 0.20% | 1 | 2.94% |
Sascha Hauer | 4 | 0.16% | 1 | 2.94% |
Mark Brown | 3 | 0.12% | 1 | 2.94% |
Tomeu Vizoso | 2 | 0.08% | 1 | 2.94% |
Total | 2495 | 100.00% | 34 | 100.00% |
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
 */

#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/slab.h>

static u8 clk_composite_get_parent(struct clk_hw *hw)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *mux_ops = composite->mux_ops;
        struct clk_hw *mux_hw = composite->mux_hw;

        __clk_hw_set_clk(mux_hw, hw);

        return mux_ops->get_parent(mux_hw);
}

static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *mux_ops = composite->mux_ops;
        struct clk_hw *mux_hw = composite->mux_hw;

        __clk_hw_set_clk(mux_hw, hw);

        return mux_ops->set_parent(mux_hw, index);
}

static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
                                               unsigned long parent_rate)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *rate_ops = composite->rate_ops;
        struct clk_hw *rate_hw = composite->rate_hw;

        __clk_hw_set_clk(rate_hw, hw);

        return rate_ops->recalc_rate(rate_hw, parent_rate);
}

static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
                                                   struct clk_rate_request *req,
                                                   struct clk_hw *parent_hw,
                                                   const struct clk_ops *rate_ops)
{
        long rate;

        req->best_parent_hw = parent_hw;
        req->best_parent_rate = clk_hw_get_rate(parent_hw);

        if (rate_ops->determine_rate)
                return rate_ops->determine_rate(rate_hw, req);

        rate = rate_ops->round_rate(rate_hw, req->rate, &req->best_parent_rate);
        if (rate < 0)
                return rate;

        req->rate = rate;

        return 0;
}

static int clk_composite_determine_rate(struct clk_hw *hw,
                                        struct clk_rate_request *req)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *rate_ops = composite->rate_ops;
        const struct clk_ops *mux_ops = composite->mux_ops;
        struct clk_hw *rate_hw = composite->rate_hw;
        struct clk_hw *mux_hw = composite->mux_hw;
        struct clk_hw *parent;
        unsigned long rate_diff;
        unsigned long best_rate_diff = ULONG_MAX;
        unsigned long best_rate = 0;
        int i, ret;

        if (rate_hw && rate_ops &&
            (rate_ops->determine_rate || rate_ops->round_rate) &&
            mux_hw && mux_ops && mux_ops->set_parent) {
                req->best_parent_hw = NULL;

                if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
                        struct clk_rate_request tmp_req;

                        parent = clk_hw_get_parent(mux_hw);

                        clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
                        ret = clk_composite_determine_rate_for_parent(rate_hw,
                                                                      &tmp_req,
                                                                      parent,
                                                                      rate_ops);
                        if (ret)
                                return ret;

                        req->rate = tmp_req.rate;
                        req->best_parent_hw = tmp_req.best_parent_hw;
                        req->best_parent_rate = tmp_req.best_parent_rate;

                        return 0;
                }

                for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
                        struct clk_rate_request tmp_req;

                        parent = clk_hw_get_parent_by_index(mux_hw, i);
                        if (!parent)
                                continue;

                        clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
                        ret = clk_composite_determine_rate_for_parent(rate_hw,
                                                                      &tmp_req,
                                                                      parent,
                                                                      rate_ops);
                        if (ret)
                                continue;

                        rate_diff = abs(req->rate - tmp_req.rate);

                        if (!rate_diff || !req->best_parent_hw ||
                            best_rate_diff > rate_diff) {
                                req->best_parent_hw = parent;
                                req->best_parent_rate = tmp_req.best_parent_rate;
                                best_rate_diff = rate_diff;
                                best_rate = tmp_req.rate;
                        }

                        if (!rate_diff)
                                return 0;
                }

                req->rate = best_rate;
                return 0;
        } else if (rate_hw && rate_ops && rate_ops->determine_rate) {
                __clk_hw_set_clk(rate_hw, hw);
                return rate_ops->determine_rate(rate_hw, req);
        } else if (mux_hw && mux_ops && mux_ops->determine_rate) {
                __clk_hw_set_clk(mux_hw, hw);
                return mux_ops->determine_rate(mux_hw, req);
        } else {
                pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
                return -EINVAL;
        }
}

static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
                                     unsigned long *prate)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *rate_ops = composite->rate_ops;
        struct clk_hw *rate_hw = composite->rate_hw;

        __clk_hw_set_clk(rate_hw, hw);

        return rate_ops->round_rate(rate_hw, rate, prate);
}

static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
                                  unsigned long parent_rate)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *rate_ops = composite->rate_ops;
        struct clk_hw *rate_hw = composite->rate_hw;

        __clk_hw_set_clk(rate_hw, hw);

        return rate_ops->set_rate(rate_hw, rate, parent_rate);
}

static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
                                             unsigned long rate,
                                             unsigned long parent_rate,
                                             u8 index)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *rate_ops = composite->rate_ops;
        const struct clk_ops *mux_ops = composite->mux_ops;
        struct clk_hw *rate_hw = composite->rate_hw;
        struct clk_hw *mux_hw = composite->mux_hw;
        unsigned long temp_rate;

        __clk_hw_set_clk(rate_hw, hw);
        __clk_hw_set_clk(mux_hw, hw);

        temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
        if (temp_rate > rate) {
                rate_ops->set_rate(rate_hw, rate, parent_rate);
                mux_ops->set_parent(mux_hw, index);
        } else {
                mux_ops->set_parent(mux_hw, index);
                rate_ops->set_rate(rate_hw, rate, parent_rate);
        }

        return 0;
}

static int clk_composite_is_enabled(struct clk_hw *hw)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *gate_ops = composite->gate_ops;
        struct clk_hw *gate_hw = composite->gate_hw;

        __clk_hw_set_clk(gate_hw, hw);

        return gate_ops->is_enabled(gate_hw);
}

static int clk_composite_enable(struct clk_hw *hw)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *gate_ops = composite->gate_ops;
        struct clk_hw *gate_hw = composite->gate_hw;

        __clk_hw_set_clk(gate_hw, hw);

        return gate_ops->enable(gate_hw);
}

static void clk_composite_disable(struct clk_hw *hw)
{
        struct clk_composite *composite = to_clk_composite(hw);
        const struct clk_ops *gate_ops = composite->gate_ops;
        struct clk_hw *gate_hw = composite->gate_hw;

        __clk_hw_set_clk(gate_hw, hw);

        gate_ops->disable(gate_hw);
}

static struct clk_hw *__clk_hw_register_composite(struct device *dev,
                        const char *name, const char * const *parent_names,
                        const struct clk_parent_data *pdata, int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        struct clk_hw *hw;
        struct clk_init_data init = {};
        struct clk_composite *composite;
        struct clk_ops *clk_composite_ops;
        int ret;

        composite = kzalloc(sizeof(*composite), GFP_KERNEL);
        if (!composite)
                return ERR_PTR(-ENOMEM);

        init.name = name;
        init.flags = flags;
        if (parent_names)
                init.parent_names = parent_names;
        else
                init.parent_data = pdata;
        init.num_parents = num_parents;
        hw = &composite->hw;

        clk_composite_ops = &composite->ops;

        if (mux_hw && mux_ops) {
                if (!mux_ops->get_parent) {
                        hw = ERR_PTR(-EINVAL);
                        goto err;
                }

                composite->mux_hw = mux_hw;
                composite->mux_ops = mux_ops;
                clk_composite_ops->get_parent = clk_composite_get_parent;
                if (mux_ops->set_parent)
                        clk_composite_ops->set_parent = clk_composite_set_parent;
                if (mux_ops->determine_rate)
                        clk_composite_ops->determine_rate = clk_composite_determine_rate;
        }

        if (rate_hw && rate_ops) {
                if (!rate_ops->recalc_rate) {
                        hw = ERR_PTR(-EINVAL);
                        goto err;
                }
                clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

                if (rate_ops->determine_rate)
                        clk_composite_ops->determine_rate =
                                clk_composite_determine_rate;
                else if (rate_ops->round_rate)
                        clk_composite_ops->round_rate =
                                clk_composite_round_rate;

                /* .set_rate requires either .round_rate or .determine_rate */
                if (rate_ops->set_rate) {
                        if (rate_ops->determine_rate || rate_ops->round_rate)
                                clk_composite_ops->set_rate =
                                                clk_composite_set_rate;
                        else
                                WARN(1, "%s: missing round_rate op is required\n",
                                     __func__);
                }

                composite->rate_hw = rate_hw;
                composite->rate_ops = rate_ops;
        }

        if (mux_hw && mux_ops && rate_hw && rate_ops) {
                if (mux_ops->set_parent && rate_ops->set_rate)
                        clk_composite_ops->set_rate_and_parent =
                                clk_composite_set_rate_and_parent;
        }

        if (gate_hw && gate_ops) {
                if (!gate_ops->is_enabled || !gate_ops->enable ||
                    !gate_ops->disable) {
                        hw = ERR_PTR(-EINVAL);
                        goto err;
                }

                composite->gate_hw = gate_hw;
                composite->gate_ops = gate_ops;
                clk_composite_ops->is_enabled = clk_composite_is_enabled;
                clk_composite_ops->enable = clk_composite_enable;
                clk_composite_ops->disable = clk_composite_disable;
        }

        init.ops = clk_composite_ops;
        composite->hw.init = &init;

        ret = clk_hw_register(dev, hw);
        if (ret) {
                hw = ERR_PTR(ret);
                goto err;
        }

        if (composite->mux_hw)
                composite->mux_hw->clk = hw->clk;

        if (composite->rate_hw)
                composite->rate_hw->clk = hw->clk;

        if (composite->gate_hw)
                composite->gate_hw->clk = hw->clk;

        return hw;

err:
        kfree(composite);
        return hw;
}

struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
                        const char * const *parent_names, int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        return __clk_hw_register_composite(dev, name, parent_names, NULL,
                                           num_parents, mux_hw, mux_ops,
                                           rate_hw, rate_ops, gate_hw,
                                           gate_ops, flags);
}
EXPORT_SYMBOL_GPL(clk_hw_register_composite);

struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
                        const char *name,
                        const struct clk_parent_data *parent_data,
                        int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        return __clk_hw_register_composite(dev, name, NULL, parent_data,
                                           num_parents, mux_hw, mux_ops,
                                           rate_hw, rate_ops, gate_hw,
                                           gate_ops, flags);
}

struct clk *clk_register_composite(struct device *dev, const char *name,
                        const char * const *parent_names, int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        struct clk_hw *hw;

        hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
                                       mux_hw, mux_ops, rate_hw, rate_ops,
                                       gate_hw, gate_ops, flags);
        if (IS_ERR(hw))
                return ERR_CAST(hw);
        return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_composite);

struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
                        const struct clk_parent_data *parent_data,
                        int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        struct clk_hw *hw;

        hw = clk_hw_register_composite_pdata(dev, name, parent_data,
                                             num_parents, mux_hw, mux_ops,
                                             rate_hw, rate_ops,
                                             gate_hw, gate_ops, flags);
        if (IS_ERR(hw))
                return ERR_CAST(hw);
        return hw->clk;
}

void clk_unregister_composite(struct clk *clk)
{
        struct clk_composite *composite;
        struct clk_hw *hw;

        hw = __clk_get_hw(clk);
        if (!hw)
                return;

        composite = to_clk_composite(hw);

        clk_unregister(clk);
        kfree(composite);
}

void clk_hw_unregister_composite(struct clk_hw *hw)
{
        struct clk_composite *composite;

        composite = to_clk_composite(hw);

        clk_hw_unregister(hw);
        kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);

static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
        clk_hw_unregister_composite(*(struct clk_hw **)res);
}

static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
                        const char *name, const char * const *parent_names,
                        const struct clk_parent_data *pdata, int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        struct clk_hw **ptr, *hw;

        ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
                           GFP_KERNEL);
        if (!ptr)
                return ERR_PTR(-ENOMEM);

        hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
                                         num_parents, mux_hw, mux_ops, rate_hw,
                                         rate_ops, gate_hw, gate_ops, flags);

        if (!IS_ERR(hw)) {
                *ptr = hw;
                devres_add(dev, ptr);
        } else {
                devres_free(ptr);
        }

        return hw;
}

struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
                        const char *name,
                        const struct clk_parent_data *parent_data,
                        int num_parents,
                        struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
                        struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
                        struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
                        unsigned long flags)
{
        return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
                                                num_parents, mux_hw, mux_ops,
                                                rate_hw, rate_ops, gate_hw,
                                                gate_ops, flags);
}
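For context, the sketch below shows how a platform clock driver might combine the kernel's generic clk_mux, clk_divider, and clk_gate helpers behind clk_hw_register_composite(), which is the API exported by the file above. It is only an illustration under assumed conditions: the function name example_register_composite(), the clock name "example-mod-clk", the parent names, and the register layout (gate bit 31, mux field at bits 25:24, divider field at bits 3:0) are hypothetical and not taken from any real driver.

```c
#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(example_clk_lock);

/* Hypothetical parent clock names. */
static const char * const example_parents[] = { "osc24m", "pll_periph" };

/* Hypothetical register layout: bit 31 = gate, bits 25:24 = mux, bits 3:0 = divider. */
static struct clk_hw *example_register_composite(struct device *dev,
                                                 void __iomem *reg)
{
        struct clk_mux *mux;
        struct clk_divider *div;
        struct clk_gate *gate;

        mux = devm_kzalloc(dev, sizeof(*mux), GFP_KERNEL);
        div = devm_kzalloc(dev, sizeof(*div), GFP_KERNEL);
        gate = devm_kzalloc(dev, sizeof(*gate), GFP_KERNEL);
        if (!mux || !div || !gate)
                return ERR_PTR(-ENOMEM);

        mux->reg = reg;
        mux->shift = 24;
        mux->mask = 0x3;
        mux->lock = &example_clk_lock;

        div->reg = reg;
        div->shift = 0;
        div->width = 4;
        div->lock = &example_clk_lock;

        gate->reg = reg;
        gate->bit_idx = 31;
        gate->lock = &example_clk_lock;

        /*
         * The composite borrows the generic mux/divider/gate ops;
         * clk-composite.c dispatches to them behind one struct clk_hw.
         */
        return clk_hw_register_composite(dev, "example-mod-clk",
                                         example_parents,
                                         ARRAY_SIZE(example_parents),
                                         &mux->hw, &clk_mux_ops,
                                         &div->hw, &clk_divider_ops,
                                         &gate->hw, &clk_gate_ops,
                                         CLK_SET_RATE_PARENT);
}
```

The returned clk_hw then behaves as a single clock: clk_set_rate() on it is routed through clk_composite_determine_rate()/clk_composite_set_rate(), parent changes go through the mux ops, and enable/disable go through the gate ops, as implemented in the file above.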