Release 4.12 drivers/clk/clk-divider.c
  
  
  
/*
 * Copyright (C) 2011 Sascha Hauer, Pengutronix <s.hauer@pengutronix.de>
 * Copyright (C) 2011 Richard Zhao, Linaro <richard.zhao@linaro.org>
 * Copyright (C) 2011-2012 Mike Turquette, Linaro Ltd <mturquette@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Adjustable divider clock implementation
 */
#include <linux/clk-provider.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/err.h>
#include <linux/string.h>
#include <linux/log2.h>
/*
 * DOC: basic adjustable divider clock that cannot gate
 *
 * Traits of this clock:
 * prepare - clk_prepare only ensures that parents are prepared
 * enable - clk_enable only ensures that parents are enabled
 * rate - rate is adjustable.  clk->rate = ceiling(parent->rate / divisor)
 * parent - fixed parent.  No clk_set_parent support
 */
/* Bitmask covering the @width low-order bits of the divider register field. */
#define div_mask(width)	((1 << (width)) - 1)
/*
 * Return the largest divider in @table whose register value still fits
 * into a @width-bit field; 0 if no entry qualifies.
 */
static unsigned int _get_table_maxdiv(const struct clk_div_table *table,
				      u8 width)
{
	const struct clk_div_table *t;
	unsigned int mask = div_mask(width);
	unsigned int best = 0;

	for (t = table; t->div; t++) {
		if (t->val > mask)
			continue;	/* value would not fit in the field */
		if (t->div > best)
			best = t->div;
	}

	return best;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 55 | 77.46% | 1 | 50.00% | 
| Stephen Boyd | 16 | 22.54% | 1 | 50.00% | 
| Total | 71 | 100.00% | 2 | 100.00% | 
/* Return the smallest divider listed in @table (UINT_MAX if table is empty). */
static unsigned int _get_table_mindiv(const struct clk_div_table *table)
{
	const struct clk_div_table *t;
	unsigned int best = UINT_MAX;

	for (t = table; t->div; t++) {
		if (t->div < best)
			best = t->div;
	}

	return best;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 55 | 100.00% | 1 | 100.00% | 
| Total | 55 | 100.00% | 1 | 100.00% | 
/*
 * Largest divider this divider can express, given its encoding flags,
 * optional lookup table and register field width.
 */
static unsigned int _get_maxdiv(const struct clk_div_table *table, u8 width,
				unsigned long flags)
{
	unsigned int maxdiv;

	if (flags & CLK_DIVIDER_ONE_BASED)
		maxdiv = div_mask(width);
	else if (flags & CLK_DIVIDER_POWER_OF_TWO)
		maxdiv = 1 << div_mask(width);
	else if (table)
		maxdiv = _get_table_maxdiv(table, width);
	else
		maxdiv = div_mask(width) + 1;	/* default: val 0 means div 1 */

	return maxdiv;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 51 | 77.27% | 2 | 50.00% | 
| Stephen Boyd | 15 | 22.73% | 2 | 50.00% | 
| Total | 66 | 100.00% | 4 | 100.00% | 
/* Translate register value @val into a divider via @table; 0 if not found. */
static unsigned int _get_table_div(const struct clk_div_table *table,
							unsigned int val)
{
	const struct clk_div_table *t = table;

	while (t->div) {
		if (t->val == val)
			return t->div;
		t++;
	}

	return 0;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 52 | 100.00% | 1 | 100.00% | 
| Total | 52 | 100.00% | 1 | 100.00% | 
/*
 * Decode register value @val into the effective divider, honouring the
 * divider's encoding flags and optional lookup table.
 */
static unsigned int _get_div(const struct clk_div_table *table,
			     unsigned int val, unsigned long flags, u8 width)
{
	unsigned int div;

	if (flags & CLK_DIVIDER_ONE_BASED)
		div = val;
	else if (flags & CLK_DIVIDER_POWER_OF_TWO)
		div = 1 << val;
	else if (flags & CLK_DIVIDER_MAX_AT_ZERO)
		div = val ? val : div_mask(width) + 1;
	else if (table)
		div = _get_table_div(table, val);
	else
		div = val + 1;	/* default encoding is zero-based */

	return div;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 51 | 64.56% | 2 | 50.00% | 
| Jim Quinlan | 21 | 26.58% | 1 | 25.00% | 
| Stephen Boyd | 7 | 8.86% | 1 | 25.00% | 
| Total | 79 | 100.00% | 4 | 100.00% | 
/* Map divider @div back to its register value via @table; 0 if absent. */
static unsigned int _get_table_val(const struct clk_div_table *table,
							unsigned int div)
{
	const struct clk_div_table *t = table;

	while (t->div) {
		if (t->div == div)
			return t->val;
		t++;
	}

	return 0;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 52 | 100.00% | 1 | 100.00% | 
| Total | 52 | 100.00% | 1 | 100.00% | 
/*
 * Encode divider @div into the register value for this divider's
 * encoding flags / lookup table (inverse of _get_div()).
 */
static unsigned int _get_val(const struct clk_div_table *table,
			     unsigned int div, unsigned long flags, u8 width)
{
	unsigned int val;

	if (flags & CLK_DIVIDER_ONE_BASED)
		val = div;
	else if (flags & CLK_DIVIDER_POWER_OF_TWO)
		val = __ffs(div);
	else if (flags & CLK_DIVIDER_MAX_AT_ZERO)
		val = (div == div_mask(width) + 1) ? 0 : div;
	else if (table)
		val = _get_table_val(table, div);
	else
		val = div - 1;	/* default encoding is zero-based */

	return val;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 50 | 59.52% | 2 | 40.00% | 
| Jim Quinlan | 25 | 29.76% | 1 | 20.00% | 
| Stephen Boyd | 7 | 8.33% | 1 | 20.00% | 
| James Hogan | 2 | 2.38% | 1 | 20.00% | 
| Total | 84 | 100.00% | 5 | 100.00% | 
/*
 * Compute the output rate from @parent_rate and the raw register value
 * @val.  A decoded divider of zero returns the parent rate unchanged and
 * warns unless CLK_DIVIDER_ALLOW_ZERO is set.
 * NOTE: reads divider->width, so @hw must be embedded in a struct clk_divider.
 */
unsigned long divider_recalc_rate(struct clk_hw *hw, unsigned long parent_rate,
				  unsigned int val,
				  const struct clk_div_table *table,
				  unsigned long flags)
{
	struct clk_divider *divider = to_clk_divider(hw);
	unsigned int div = _get_div(table, val, flags, divider->width);

	if (div)
		return DIV_ROUND_UP_ULL((u64)parent_rate, div);

	WARN(!(flags & CLK_DIVIDER_ALLOW_ZERO),
	     "%s: Zero divisor and CLK_DIVIDER_ALLOW_ZERO not set\n",
	     clk_hw_get_name(hw));
	return parent_rate;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Michael Turquette | 27 | 28.12% | 1 | 12.50% | 
| Rajendra Nayak | 22 | 22.92% | 1 | 12.50% | 
| Stephen Boyd | 19 | 19.79% | 2 | 25.00% | 
| Jim Quinlan | 14 | 14.58% | 1 | 12.50% | 
| Sören Brinkmann | 7 | 7.29% | 1 | 12.50% | 
| Brian Norris | 4 | 4.17% | 1 | 12.50% | 
| Tomi Valkeinen | 3 | 3.12% | 1 | 12.50% | 
| Total | 96 | 100.00% | 8 | 100.00% | 
EXPORT_SYMBOL_GPL(divider_recalc_rate);
/* clk_ops .recalc_rate: read the register field and decode it into a rate. */
static unsigned long clk_divider_recalc_rate(struct clk_hw *hw,
		unsigned long parent_rate)
{
	struct clk_divider *divider = to_clk_divider(hw);
	unsigned int val = clk_readl(divider->reg) >> divider->shift;

	return divider_recalc_rate(hw, parent_rate,
				   val & div_mask(divider->width),
				   divider->table, divider->flags);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 70 | 100.00% | 1 | 100.00% | 
| Total | 70 | 100.00% | 1 | 100.00% | 
/* True when divider @div appears in @table. */
static bool _is_valid_table_div(const struct clk_div_table *table,
							 unsigned int div)
{
	const struct clk_div_table *t = table;

	while (t->div) {
		if (t->div == div)
			return true;
		t++;
	}

	return false;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 49 | 100.00% | 1 | 100.00% | 
| Total | 49 | 100.00% | 1 | 100.00% | 
/* Check whether @div is representable given the encoding flags / table. */
static bool _is_valid_div(const struct clk_div_table *table, unsigned int div,
			  unsigned long flags)
{
	if (flags & CLK_DIVIDER_POWER_OF_TWO)
		return is_power_of_2(div);

	return table ? _is_valid_table_div(table, div) : true;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 39 | 82.98% | 1 | 33.33% | 
| Stephen Boyd | 7 | 14.89% | 1 | 33.33% | 
| James Hogan | 1 | 2.13% | 1 | 33.33% | 
| Total | 47 | 100.00% | 3 | 100.00% | 
/*
 * Round @div up to the nearest divider in @table.
 * Returns INT_MAX when the table has no entry >= @div.
 */
static int _round_up_table(const struct clk_div_table *table, int div)
{
	const struct clk_div_table *t;
	int up = INT_MAX;

	for (t = table; t->div; t++) {
		if (t->div == div)
			return div;	/* exact match */
		if (t->div > div && t->div < up)
			up = t->div;
	}

	return up;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 89 | 100.00% | 2 | 100.00% | 
| Total | 89 | 100.00% | 2 | 100.00% | 
/*
 * Round @div down to the nearest divider in @table.
 * Falls back to the table's minimum divider if nothing is <= @div.
 */
static int _round_down_table(const struct clk_div_table *table, int div)
{
	const struct clk_div_table *t;
	int down = _get_table_mindiv(table);

	for (t = table; t->div; t++) {
		if (t->div == div)
			return div;	/* exact match */
		if (t->div < div && t->div > down)
			down = t->div;
	}

	return down;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 92 | 100.00% | 1 | 100.00% | 
| Total | 92 | 100.00% | 1 | 100.00% | 
/*
 * Smallest usable divider that yields a rate <= @rate, after applying
 * the power-of-two / table constraints.
 */
static int _div_round_up(const struct clk_div_table *table,
			 unsigned long parent_rate, unsigned long rate,
			 unsigned long flags)
{
	int d = DIV_ROUND_UP_ULL((u64)parent_rate, rate);

	if (flags & CLK_DIVIDER_POWER_OF_TWO)
		d = __roundup_pow_of_two(d);
	if (table)
		d = _round_up_table(table, d);

	return d;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 55 | 83.33% | 1 | 33.33% | 
| Stephen Boyd | 7 | 10.61% | 1 | 33.33% | 
| Brian Norris | 4 | 6.06% | 1 | 33.33% | 
| Total | 66 | 100.00% | 3 | 100.00% | 
/*
 * Pick between the valid divider just above (up) and just below (down)
 * the exact parent_rate/rate ratio, returning whichever produces an
 * output rate closest to @rate (ties go to the larger divider, "up").
 */
static int _div_round_closest(const struct clk_div_table *table,
			      unsigned long parent_rate, unsigned long rate,
			      unsigned long flags)
{
	int up, down;
	unsigned long up_rate, down_rate;
	up = DIV_ROUND_UP_ULL((u64)parent_rate, rate);
	down = parent_rate / rate;
	/* Snap both candidates onto the set of representable dividers. */
	if (flags & CLK_DIVIDER_POWER_OF_TWO) {
		up = __roundup_pow_of_two(up);
		down = __rounddown_pow_of_two(down);
	} else if (table) {
		up = _round_up_table(table, up);
		down = _round_down_table(table, down);
	}
	up_rate = DIV_ROUND_UP_ULL((u64)parent_rate, up);
	down_rate = DIV_ROUND_UP_ULL((u64)parent_rate, down);
	/*
	 * NOTE(review): the unsigned subtractions assume up_rate <= rate and
	 * rate <= down_rate; if rounding ever violates that ordering the
	 * differences wrap around.  Existing behavior, kept as-is.
	 */
	return (rate - up_rate) <= (down_rate - rate) ? up : down;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 86 | 60.99% | 1 | 20.00% | 
| Uwe Kleine-König | 36 | 25.53% | 2 | 40.00% | 
| Brian Norris | 12 | 8.51% | 1 | 20.00% | 
| Stephen Boyd | 7 | 4.96% | 1 | 20.00% | 
| Total | 141 | 100.00% | 5 | 100.00% | 
/* Dispatch divider rounding on the policy requested via divider flags. */
static int _div_round(const struct clk_div_table *table,
		      unsigned long parent_rate, unsigned long rate,
		      unsigned long flags)
{
	if (!(flags & CLK_DIVIDER_ROUND_CLOSEST))
		return _div_round_up(table, parent_rate, rate, flags);

	return _div_round_closest(table, parent_rate, rate, flags);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 41 | 75.93% | 1 | 50.00% | 
| Stephen Boyd | 13 | 24.07% | 1 | 50.00% | 
| Total | 54 | 100.00% | 2 | 100.00% | 
/*
 * Is candidate rate @now a better match for @rate than the current @best?
 * ROUND_CLOSEST compares absolute error; otherwise we never exceed @rate
 * and prefer the highest rate found so far.
 */
static bool _is_best_div(unsigned long rate, unsigned long now,
			 unsigned long best, unsigned long flags)
{
	bool closest = flags & CLK_DIVIDER_ROUND_CLOSEST;

	if (closest)
		return abs(rate - now) < abs(rate - best);

	return now <= rate && now > best;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 42 | 80.77% | 1 | 33.33% | 
| Tomasz Figa | 6 | 11.54% | 1 | 33.33% | 
| Stephen Boyd | 4 | 7.69% | 1 | 33.33% | 
| Total | 52 | 100.00% | 3 | 100.00% | 
/* Next representable divider strictly greater than @div. */
static int _next_div(const struct clk_div_table *table, int div,
		     unsigned long flags)
{
	int next = div + 1;

	if (flags & CLK_DIVIDER_POWER_OF_TWO)
		return __roundup_pow_of_two(next);
	if (table)
		return _round_up_table(table, next);

	return next;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Maxime Coquelin | 42 | 85.71% | 1 | 50.00% | 
| Stephen Boyd | 7 | 14.29% | 1 | 50.00% | 
| Total | 49 | 100.00% | 2 | 100.00% | 
/*
 * Find the divider that best realizes @rate.  When CLK_SET_RATE_PARENT is
 * set, every candidate divider also re-rounds the parent rate via
 * clk_hw_round_rate(); the chosen parent rate is written through
 * @best_parent_rate.  Returns the selected divider.
 */
static int clk_divider_bestdiv(struct clk_hw *hw, unsigned long rate,
			       unsigned long *best_parent_rate,
			       const struct clk_div_table *table, u8 width,
			       unsigned long flags)
{
	int i, bestdiv = 0;
	unsigned long parent_rate, best = 0, now, maxdiv;
	unsigned long parent_rate_saved = *best_parent_rate;
	if (!rate)
		rate = 1;	/* avoid division by zero below */
	maxdiv = _get_maxdiv(table, width, flags);
	if (!(clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT)) {
		/* Parent is fixed: round against the current parent rate and clamp. */
		parent_rate = *best_parent_rate;
		bestdiv = _div_round(table, parent_rate, rate, flags);
		bestdiv = bestdiv == 0 ? 1 : bestdiv;
		bestdiv = bestdiv > maxdiv ? maxdiv : bestdiv;
		return bestdiv;
	}
	/*
	 * The maximum divider we can use without overflowing
	 * unsigned long in rate * i below
	 */
	maxdiv = min(ULONG_MAX / rate, maxdiv);
	for (i = _next_div(table, 0, flags); i <= maxdiv;
					     i = _next_div(table, i, flags)) {
		if (rate * i == parent_rate_saved) {
			/*
			 * It's the most ideal case if the requested rate can be
			 * divided from parent clock without needing to change
			 * parent rate, so return the divider immediately.
			 */
			*best_parent_rate = parent_rate_saved;
			return i;
		}
		parent_rate = clk_hw_round_rate(clk_hw_get_parent(hw),
					       rate * i);
		now = DIV_ROUND_UP_ULL((u64)parent_rate, i);
		if (_is_best_div(rate, now, best, flags)) {
			bestdiv = i;
			best = now;
			*best_parent_rate = parent_rate;
		}
	}
	if (!bestdiv) {
		/* Nothing matched: fall back to max divider, slowest parent rate. */
		bestdiv = _get_maxdiv(table, width, flags);
		*best_parent_rate = clk_hw_round_rate(clk_hw_get_parent(hw), 1);
	}
	return bestdiv;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Michael Turquette | 176 | 62.19% | 1 | 7.14% | 
| Stephen Boyd | 36 | 12.72% | 3 | 21.43% | 
| Shawn Guo | 34 | 12.01% | 2 | 14.29% | 
| Maxime Coquelin | 13 | 4.59% | 3 | 21.43% | 
| Masahiro Yamada | 8 | 2.83% | 1 | 7.14% | 
| Rajendra Nayak | 8 | 2.83% | 1 | 7.14% | 
| Brian Norris | 4 | 1.41% | 1 | 7.14% | 
| Tomi Valkeinen | 3 | 1.06% | 1 | 7.14% | 
| Uwe Kleine-König | 1 | 0.35% | 1 | 7.14% | 
| Total | 283 | 100.00% | 14 | 100.00% | 
/*
 * Round @rate to the nearest rate this divider can produce; @prate may be
 * updated when the parent rate is adjustable.
 */
long divider_round_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long *prate, const struct clk_div_table *table,
			u8 width, unsigned long flags)
{
	int bestdiv = clk_divider_bestdiv(hw, rate, prate, table, width, flags);

	return DIV_ROUND_UP_ULL((u64)*prate, bestdiv);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Michael Turquette | 37 | 57.81% | 1 | 25.00% | 
| Stephen Boyd | 20 | 31.25% | 1 | 25.00% | 
| Brian Norris | 4 | 6.25% | 1 | 25.00% | 
| Tomi Valkeinen | 3 | 4.69% | 1 | 25.00% | 
| Total | 64 | 100.00% | 4 | 100.00% | 
EXPORT_SYMBOL_GPL(divider_round_rate);
/*
 * clk_ops .round_rate: normal dividers delegate to divider_round_rate();
 * read-only dividers simply report the rate implied by the current
 * hardware setting.
 */
static long clk_divider_round_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long *prate)
{
	struct clk_divider *divider = to_clk_divider(hw);
	int div;

	if (!(divider->flags & CLK_DIVIDER_READ_ONLY))
		return divider_round_rate(hw, rate, prate, divider->table,
					  divider->width, divider->flags);

	/* if read only, just return current value */
	div = clk_readl(divider->reg) >> divider->shift;
	div &= div_mask(divider->width);
	div = _get_div(divider->table, div, divider->flags, divider->width);

	return DIV_ROUND_UP_ULL((u64)*prate, div);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 78 | 65.55% | 1 | 14.29% | 
| Michael Turquette | 24 | 20.17% | 1 | 14.29% | 
| Heiko Stübner | 5 | 4.20% | 1 | 14.29% | 
| Brian Norris | 4 | 3.36% | 1 | 14.29% | 
| Jim Quinlan | 4 | 3.36% | 1 | 14.29% | 
| Shawn Guo | 3 | 2.52% | 1 | 14.29% | 
| Geert Uytterhoeven | 1 | 0.84% | 1 | 14.29% | 
| Total | 119 | 100.00% | 7 | 100.00% | 
/*
 * Compute the register value encoding the divider needed to derive @rate
 * from @parent_rate.  Returns -EINVAL when no representable divider fits.
 */
int divider_get_val(unsigned long rate, unsigned long parent_rate,
		    const struct clk_div_table *table, u8 width,
		    unsigned long flags)
{
	unsigned int div = DIV_ROUND_UP_ULL((u64)parent_rate, rate);
	unsigned int val;

	if (!_is_valid_div(table, div, flags))
		return -EINVAL;

	val = _get_val(table, div, flags, width);

	/* Clamp to the register field so we never return out-of-range bits. */
	return min_t(unsigned int, val, div_mask(width));
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 44 | 53.01% | 1 | 12.50% | 
| Maxime Coquelin | 13 | 15.66% | 1 | 12.50% | 
| Michael Turquette | 10 | 12.05% | 1 | 12.50% | 
| Rajendra Nayak | 6 | 7.23% | 1 | 12.50% | 
| Brian Norris | 4 | 4.82% | 1 | 12.50% | 
| Tomi Valkeinen | 3 | 3.61% | 1 | 12.50% | 
| Jim Quinlan | 2 | 2.41% | 1 | 12.50% | 
| Shawn Guo | 1 | 1.20% | 1 | 12.50% | 
| Total | 83 | 100.00% | 8 | 100.00% | 
EXPORT_SYMBOL_GPL(divider_get_val);
/*
 * clk_ops .set_rate: program the divider register so the clock runs at
 * (approximately) @rate given @parent_rate.  Returns 0 on success or a
 * negative errno when no valid divider exists.
 */
static int clk_divider_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_divider *divider = to_clk_divider(hw);
	int value;
	unsigned long flags = 0;
	u32 val;

	value = divider_get_val(rate, parent_rate, divider->table,
				divider->width, divider->flags);
	/*
	 * Fix: divider_get_val() returns -EINVAL for an unrepresentable
	 * divider.  Previously the negative errno was stored in an unsigned
	 * int and masked into the register; propagate the error instead.
	 */
	if (value < 0)
		return value;

	if (divider->lock)
		spin_lock_irqsave(divider->lock, flags);
	else
		__acquire(divider->lock);

	if (divider->flags & CLK_DIVIDER_HIWORD_MASK) {
		/* HIWORD: upper 16 bits act as a write-enable mask for the field. */
		val = div_mask(divider->width) << (divider->shift + 16);
	} else {
		val = clk_readl(divider->reg);
		val &= ~(div_mask(divider->width) << divider->shift);
	}
	val |= value << divider->shift;
	clk_writel(val, divider->reg);

	if (divider->lock)
		spin_unlock_irqrestore(divider->lock, flags);
	else
		__release(divider->lock);

	return 0;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Michael Turquette | 78 | 42.39% | 1 | 16.67% | 
| Stephen Boyd | 74 | 40.22% | 2 | 33.33% | 
| Haojian Zhuang | 28 | 15.22% | 1 | 16.67% | 
| Rajendra Nayak | 2 | 1.09% | 1 | 16.67% | 
| Gerhard Sittig | 2 | 1.09% | 1 | 16.67% | 
| Total | 184 | 100.00% | 6 | 100.00% | 
/* Read/write divider: the rate can be queried, rounded and programmed. */
const struct clk_ops clk_divider_ops = {
	.recalc_rate = clk_divider_recalc_rate,
	.round_rate = clk_divider_round_rate,
	.set_rate = clk_divider_set_rate,
};
EXPORT_SYMBOL_GPL(clk_divider_ops);
/* Read-only divider: no .set_rate, the hardware setting is never changed. */
const struct clk_ops clk_divider_ro_ops = {
	.recalc_rate = clk_divider_recalc_rate,
	.round_rate = clk_divider_round_rate,
};
EXPORT_SYMBOL_GPL(clk_divider_ro_ops);
/*
 * Common worker for all divider registration entry points: allocate a
 * struct clk_divider, fill it in and register it with the framework.
 * Returns the clk_hw on success or an ERR_PTR on failure.
 */
static struct clk_hw *_register_divider(struct device *dev, const char *name,
		const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_divider_flags, const struct clk_div_table *table,
		spinlock_t *lock)
{
	struct clk_divider *div;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	/* A HIWORD divider must fit entirely within the low 16 bits. */
	if ((clk_divider_flags & CLK_DIVIDER_HIWORD_MASK) &&
	    width + shift > 16) {
		pr_warn("divider value exceeds LOWORD field\n");
		return ERR_PTR(-EINVAL);
	}

	div = kzalloc(sizeof(*div), GFP_KERNEL);
	if (!div)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = (clk_divider_flags & CLK_DIVIDER_READ_ONLY) ?
			&clk_divider_ro_ops : &clk_divider_ops;
	init.flags = flags | CLK_IS_BASIC;
	init.parent_names = parent_name ? &parent_name : NULL;
	init.num_parents = parent_name ? 1 : 0;

	div->reg = reg;
	div->shift = shift;
	div->width = width;
	div->flags = clk_divider_flags;
	div->lock = lock;
	div->hw.init = &init;
	div->table = table;

	hw = &div->hw;
	ret = clk_hw_register(dev, hw);
	if (ret) {
		kfree(div);
		return ERR_PTR(ret);
	}

	return hw;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Michael Turquette | 123 | 45.90% | 2 | 22.22% | 
| Saravana Kannan | 57 | 21.27% | 1 | 11.11% | 
| Haojian Zhuang | 30 | 11.19% | 1 | 11.11% | 
| Stephen Boyd | 28 | 10.45% | 2 | 22.22% | 
| Rajendra Nayak | 16 | 5.97% | 2 | 22.22% | 
| Heiko Stübner | 14 | 5.22% | 1 | 11.11% | 
| Total | 268 | 100.00% | 9 | 100.00% | 
/**
 * clk_register_divider - register a divider clock with the clock framework
 * @dev: device registering this clock
 * @name: name of this clock
 * @parent_name: name of clock's parent
 * @flags: framework-specific flags
 * @reg: register address to adjust divider
 * @shift: number of bits to shift the bitfield
 * @width: width of the bitfield
 * @clk_divider_flags: divider-specific flags for this clock
 * @lock: shared register lock for this clock
 */
struct clk *clk_register_divider(struct device *dev, const char *name,
		const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_divider_flags, spinlock_t *lock)
{
	/* Table-less registration; convert the clk_hw result to a clk. */
	struct clk_hw *hw = _register_divider(dev, name, parent_name, flags,
					      reg, shift, width,
					      clk_divider_flags, NULL, lock);

	return IS_ERR(hw) ? ERR_CAST(hw) : hw->clk;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 67 | 72.83% | 1 | 50.00% | 
| Stephen Boyd | 25 | 27.17% | 1 | 50.00% | 
| Total | 92 | 100.00% | 2 | 100.00% | 
EXPORT_SYMBOL_GPL(clk_register_divider);
/**
 * clk_hw_register_divider - register a divider clock with the clock framework
 * @dev: device registering this clock
 * @name: name of this clock
 * @parent_name: name of clock's parent
 * @flags: framework-specific flags
 * @reg: register address to adjust divider
 * @shift: number of bits to shift the bitfield
 * @width: width of the bitfield
 * @clk_divider_flags: divider-specific flags for this clock
 * @lock: shared register lock for this clock
 */
struct clk_hw *clk_hw_register_divider(struct device *dev, const char *name,
		const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_divider_flags, spinlock_t *lock)
{
	/* clk_hw-based variant of clk_register_divider(); no divider table. */
	const struct clk_div_table *table = NULL;

	return _register_divider(dev, name, parent_name, flags, reg, shift,
			width, clk_divider_flags, table, lock);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 68 | 100.00% | 1 | 100.00% | 
| Total | 68 | 100.00% | 1 | 100.00% | 
EXPORT_SYMBOL_GPL(clk_hw_register_divider);
/**
 * clk_register_divider_table - register a table based divider clock with
 * the clock framework
 * @dev: device registering this clock
 * @name: name of this clock
 * @parent_name: name of clock's parent
 * @flags: framework-specific flags
 * @reg: register address to adjust divider
 * @shift: number of bits to shift the bitfield
 * @width: width of the bitfield
 * @clk_divider_flags: divider-specific flags for this clock
 * @table: array of divider/value pairs ending with a div set to 0
 * @lock: shared register lock for this clock
 */
struct clk *clk_register_divider_table(struct device *dev, const char *name,
		const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_divider_flags, const struct clk_div_table *table,
		spinlock_t *lock)
{
	/* Table-driven registration; convert the clk_hw result to a clk. */
	struct clk_hw *hw = _register_divider(dev, name, parent_name, flags,
					      reg, shift, width,
					      clk_divider_flags, table, lock);

	return IS_ERR(hw) ? ERR_CAST(hw) : hw->clk;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Rajendra Nayak | 73 | 74.49% | 1 | 50.00% | 
| Stephen Boyd | 25 | 25.51% | 1 | 50.00% | 
| Total | 98 | 100.00% | 2 | 100.00% | 
EXPORT_SYMBOL_GPL(clk_register_divider_table);
/**
 * clk_hw_register_divider_table - register a table based divider clock with
 * the clock framework
 * @dev: device registering this clock
 * @name: name of this clock
 * @parent_name: name of clock's parent
 * @flags: framework-specific flags
 * @reg: register address to adjust divider
 * @shift: number of bits to shift the bitfield
 * @width: width of the bitfield
 * @clk_divider_flags: divider-specific flags for this clock
 * @table: array of divider/value pairs ending with a div set to 0
 * @lock: shared register lock for this clock
 */
struct clk_hw *clk_hw_register_divider_table(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_divider_flags, const struct clk_div_table *table,
		spinlock_t *lock)
{
	/* clk_hw-based, table-driven variant; thin wrapper over the worker. */
	return _register_divider(dev, name, parent_name, flags, reg, shift,
			width, clk_divider_flags, table, lock);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 74 | 100.00% | 1 | 100.00% | 
| Total | 74 | 100.00% | 1 | 100.00% | 
EXPORT_SYMBOL_GPL(clk_hw_register_divider_table);
/*
 * Unregister @clk from the framework and free the clk_divider allocated
 * by clk_register_divider{,_table}().  No-op when @clk has no clk_hw.
 */
void clk_unregister_divider(struct clk *clk)
{
	struct clk_hw *hw = __clk_get_hw(clk);

	if (hw) {
		struct clk_divider *div = to_clk_divider(hw);

		/* Unregister first; only then is the backing memory unused. */
		clk_unregister(clk);
		kfree(div);
	}
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Krzysztof Kozlowski | 50 | 100.00% | 1 | 100.00% | 
| Total | 50 | 100.00% | 1 | 100.00% | 
EXPORT_SYMBOL_GPL(clk_unregister_divider);
/**
 * clk_hw_unregister_divider - unregister a clk divider
 * @hw: hardware-specific clock data to unregister
 */
void clk_hw_unregister_divider(struct clk_hw *hw)
{
	/* Recover the containing clk_divider so it can be freed afterwards. */
	struct clk_divider *div = to_clk_divider(hw);

	clk_hw_unregister(hw);
	kfree(div);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 32 | 100.00% | 1 | 100.00% | 
| Total | 32 | 100.00% | 1 | 100.00% | 
EXPORT_SYMBOL_GPL(clk_hw_unregister_divider);
Overall Contributors
| Person | Tokens | Prop | Commits | CommitProp | 
| Stephen Boyd | 717 | 25.53% | 7 | 17.95% | 
| Rajendra Nayak | 595 | 21.19% | 3 | 7.69% | 
| Maxime Coquelin | 528 | 18.80% | 4 | 10.26% | 
| Michael Turquette | 526 | 18.73% | 2 | 5.13% | 
| Jim Quinlan | 66 | 2.35% | 1 | 2.56% | 
| Haojian Zhuang | 58 | 2.07% | 1 | 2.56% | 
| Saravana Kannan | 57 | 2.03% | 1 | 2.56% | 
| Krzysztof Kozlowski | 55 | 1.96% | 1 | 2.56% | 
| Heiko Stübner | 41 | 1.46% | 2 | 5.13% | 
| Shawn Guo | 39 | 1.39% | 4 | 10.26% | 
| Brian Norris | 37 | 1.32% | 1 | 2.56% | 
| Uwe Kleine-König | 37 | 1.32% | 3 | 7.69% | 
| Tomi Valkeinen | 12 | 0.43% | 1 | 2.56% | 
| Fabio Estevam | 10 | 0.36% | 1 | 2.56% | 
| Masahiro Yamada | 8 | 0.28% | 1 | 2.56% | 
| Sören Brinkmann | 7 | 0.25% | 1 | 2.56% | 
| Tomasz Figa | 6 | 0.21% | 1 | 2.56% | 
| James Hogan | 6 | 0.21% | 2 | 5.13% | 
| Gerhard Sittig | 2 | 0.07% | 1 | 2.56% | 
| Geert Uytterhoeven | 1 | 0.04% | 1 | 2.56% | 
| Total | 2808 | 100.00% | 39 | 100.00% | 
Information contained on this website is for historical information purposes only and does not indicate or represent copyright ownership.