Release 4.14 arch/mips/bcm63xx/clk.c
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2008 Maxime Bizon <mbizon@freebox.fr>
 */
#include <linux/init.h>
#include <linux/export.h>
#include <linux/mutex.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <bcm63xx_cpu.h>
#include <bcm63xx_io.h>
#include <bcm63xx_regs.h>
#include <bcm63xx_reset.h>

struct clk {
        void (*set)(struct clk *, int);
        unsigned int rate;
        unsigned int usage;
        int id;
};

static DEFINE_MUTEX(clocks_mutex);

static void clk_enable_unlocked(struct clk *clk)
{
        if (clk->set && (clk->usage++) == 0)
                clk->set(clk, 1);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 35 | 100.00% | 1 | 100.00% |
Total | 35 | 100.00% | 1 | 100.00% |
static void clk_disable_unlocked(struct clk *clk)
{
        if (clk->set && (--clk->usage) == 0)
                clk->set(clk, 0);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 35 | 100.00% | 1 | 100.00% |
Total | 35 | 100.00% | 1 | 100.00% |
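The two helpers above reference-count each clock under clocks_mutex (taken by clk_enable()/clk_disable() further down), so only the first enable and the last disable actually reach the hardware through clk->set(). An illustrative sketch of the counting, not part of the file itself ("clk" stands for any of the gated clocks defined below):

        clk_enable(clk);        /* usage 0 -> 1: clk->set(clk, 1) gates the clock on */
        clk_enable(clk);        /* usage 1 -> 2: no register access */
        clk_disable(clk);       /* usage 2 -> 1: no register access */
        clk_disable(clk);       /* usage 1 -> 0: clk->set(clk, 0) gates the clock off */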
static void bcm_hwclock_set(u32 mask, int enable)
{
        u32 reg;

        reg = bcm_perf_readl(PERF_CKCTL_REG);
        if (enable)
                reg |= mask;
        else
                reg &= ~mask;
        bcm_perf_writel(reg, PERF_CKCTL_REG);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 43 | 100.00% | 1 | 100.00% |
Total | 43 | 100.00% | 1 | 100.00% |
/*
 * Ethernet MAC "misc" clock: dma clocks and main clock on 6348
 */
static void enet_misc_set(struct clk *clk, int enable)
{
        u32 mask;

        if (BCMCPU_IS_6338())
                mask = CKCTL_6338_ENET_EN;
        else if (BCMCPU_IS_6345())
                mask = CKCTL_6345_ENET_EN;
        else if (BCMCPU_IS_6348())
                mask = CKCTL_6348_ENET_EN;
        else
                /* BCMCPU_IS_6358 */
                mask = CKCTL_6358_EMUSB_EN;

        bcm_hwclock_set(mask, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 59 | 100.00% | 1 | 100.00% |
Total | 59 | 100.00% | 1 | 100.00% |
static struct clk clk_enet_misc = {
        .set = enet_misc_set,
};

/*
 * Ethernet MAC clocks: only relevant on 6358, silently enable misc
 * clocks
 */
static void enetx_set(struct clk *clk, int enable)
{
        if (enable)
                clk_enable_unlocked(&clk_enet_misc);
        else
                clk_disable_unlocked(&clk_enet_misc);

        if (BCMCPU_IS_3368() || BCMCPU_IS_6358()) {
                u32 mask;

                if (clk->id == 0)
                        mask = CKCTL_6358_ENET0_EN;
                else
                        mask = CKCTL_6358_ENET1_EN;
                bcm_hwclock_set(mask, enable);
        }
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 65 | 95.59% | 1 | 50.00% |
Florian Fainelli | 3 | 4.41% | 1 | 50.00% |
Total | 68 | 100.00% | 2 | 100.00% |
static struct clk clk_enet0 = {
        .id = 0,
        .set = enetx_set,
};

static struct clk clk_enet1 = {
        .id = 1,
        .set = enetx_set,
};

/*
 * Ethernet PHY clock
 */
static void ephy_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_3368() || BCMCPU_IS_6358())
                bcm_hwclock_set(CKCTL_6358_EPHY_EN, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 26 | 89.66% | 1 | 50.00% |
Florian Fainelli | 3 | 10.34% | 1 | 50.00% |
Total | 29 | 100.00% | 2 | 100.00% |
static struct clk clk_ephy = {
        .set = ephy_set,
};

/*
 * Ethernet switch clock
 */
static void enetsw_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_6328())
                bcm_hwclock_set(CKCTL_6328_ROBOSW_EN, enable);
        else if (BCMCPU_IS_6362())
                bcm_hwclock_set(CKCTL_6362_ROBOSW_EN, enable);
        else if (BCMCPU_IS_6368())
                bcm_hwclock_set(CKCTL_6368_ROBOSW_EN |
                                CKCTL_6368_SWPKT_USB_EN |
                                CKCTL_6368_SWPKT_SAR_EN,
                                enable);
        else
                return;

        if (enable) {
                /* reset switch core after clock change */
                bcm63xx_core_set_reset(BCM63XX_RESET_ENETSW, 1);
                msleep(10);
                bcm63xx_core_set_reset(BCM63XX_RESET_ENETSW, 0);
                msleep(10);
        }
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 54 | 60.67% | 1 | 25.00% |
Jonas Gorski | 34 | 38.20% | 2 | 50.00% |
Florian Fainelli | 1 | 1.12% | 1 | 25.00% |
Total | 89 | 100.00% | 4 | 100.00% |
static struct clk clk_enetsw = {
        .set = enetsw_set,
};

/*
 * PCM clock
 */
static void pcm_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_3368())
                bcm_hwclock_set(CKCTL_3368_PCM_EN, enable);
        if (BCMCPU_IS_6358())
                bcm_hwclock_set(CKCTL_6358_PCM_EN, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 26 | 68.42% | 1 | 50.00% |
Florian Fainelli | 12 | 31.58% | 1 | 50.00% |
Total | 38 | 100.00% | 2 | 100.00% |
static struct clk clk_pcm = {
        .set = pcm_set,
};

/*
 * USB host clock
 */
static void usbh_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_6328())
                bcm_hwclock_set(CKCTL_6328_USBH_EN, enable);
        else if (BCMCPU_IS_6348())
                bcm_hwclock_set(CKCTL_6348_USBH_EN, enable);
        else if (BCMCPU_IS_6362())
                bcm_hwclock_set(CKCTL_6362_USBH_EN, enable);
        else if (BCMCPU_IS_6368())
                bcm_hwclock_set(CKCTL_6368_USBH_EN, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 38 | 58.46% | 2 | 40.00% |
Jonas Gorski | 13 | 20.00% | 1 | 20.00% |
Kevin Cernekee | 13 | 20.00% | 1 | 20.00% |
Florian Fainelli | 1 | 1.54% | 1 | 20.00% |
Total | 65 | 100.00% | 5 | 100.00% |
static struct clk clk_usbh = {
        .set = usbh_set,
};

/*
 * USB device clock
 */
static void usbd_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_6328())
                bcm_hwclock_set(CKCTL_6328_USBD_EN, enable);
        else if (BCMCPU_IS_6362())
                bcm_hwclock_set(CKCTL_6362_USBD_EN, enable);
        else if (BCMCPU_IS_6368())
                bcm_hwclock_set(CKCTL_6368_USBD_EN, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Kevin Cernekee | 39 | 75.00% | 1 | 50.00% |
Jonas Gorski | 13 | 25.00% | 1 | 50.00% |
Total | 52 | 100.00% | 2 | 100.00% |
static struct clk clk_usbd = {
        .set = usbd_set,
};

/*
 * SPI clock
 */
static void spi_set(struct clk *clk, int enable)
{
        u32 mask;

        if (BCMCPU_IS_6338())
                mask = CKCTL_6338_SPI_EN;
        else if (BCMCPU_IS_6348())
                mask = CKCTL_6348_SPI_EN;
        else if (BCMCPU_IS_3368() || BCMCPU_IS_6358())
                mask = CKCTL_6358_SPI_EN;
        else if (BCMCPU_IS_6362())
                mask = CKCTL_6362_SPI_EN;
        else
                /* BCMCPU_IS_6368 */
                mask = CKCTL_6368_SPI_EN;

        bcm_hwclock_set(mask, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 47 | 65.28% | 1 | 25.00% |
Florian Fainelli | 15 | 20.83% | 2 | 50.00% |
Jonas Gorski | 10 | 13.89% | 1 | 25.00% |
Total | 72 | 100.00% | 4 | 100.00% |
static struct clk clk_spi = {
        .set = spi_set,
};

/*
 * HSSPI clock
 */
static void hsspi_set(struct clk *clk, int enable)
{
        u32 mask;

        if (BCMCPU_IS_6328())
                mask = CKCTL_6328_HSSPI_EN;
        else if (BCMCPU_IS_6362())
                mask = CKCTL_6362_HSSPI_EN;
        else
                return;

        bcm_hwclock_set(mask, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Jonas Gorski | 45 | 100.00% | 1 | 100.00% |
Total | 45 | 100.00% | 1 | 100.00% |
static struct clk clk_hsspi = {
        .set = hsspi_set,
};

/*
 * XTM clock
 */
static void xtm_set(struct clk *clk, int enable)
{
        if (!BCMCPU_IS_6368())
                return;

        bcm_hwclock_set(CKCTL_6368_SAR_EN |
                        CKCTL_6368_SWPKT_SAR_EN, enable);

        if (enable) {
                /* reset sar core after clock change */
                bcm63xx_core_set_reset(BCM63XX_RESET_SAR, 1);
                mdelay(1);
                bcm63xx_core_set_reset(BCM63XX_RESET_SAR, 0);
                mdelay(1);
        }
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 54 | 88.52% | 1 | 33.33% |
Jonas Gorski | 6 | 9.84% | 1 | 33.33% |
Florian Fainelli | 1 | 1.64% | 1 | 33.33% |
Total | 61 | 100.00% | 3 | 100.00% |
static struct clk clk_xtm = {
        .set = xtm_set,
};

/*
 * IPsec clock
 */
static void ipsec_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_6362())
                bcm_hwclock_set(CKCTL_6362_IPSEC_EN, enable);
        else if (BCMCPU_IS_6368())
                bcm_hwclock_set(CKCTL_6368_IPSEC_EN, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Florian Fainelli | 21 | 53.85% | 1 | 50.00% |
Jonas Gorski | 18 | 46.15% | 1 | 50.00% |
Total | 39 | 100.00% | 2 | 100.00% |
static struct clk clk_ipsec = {
        .set = ipsec_set,
};

/*
 * PCIe clock
 */
static void pcie_set(struct clk *clk, int enable)
{
        if (BCMCPU_IS_6328())
                bcm_hwclock_set(CKCTL_6328_PCIE_EN, enable);
        else if (BCMCPU_IS_6362())
                bcm_hwclock_set(CKCTL_6362_PCIE_EN, enable);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Jonas Gorski | 39 | 100.00% | 2 | 100.00% |
Total | 39 | 100.00% | 2 | 100.00% |
static struct clk clk_pcie = {
        .set = pcie_set,
};

/*
 * Internal peripheral clock
 */
static struct clk clk_periph = {
        .rate = (50 * 1000 * 1000),
};

/*
 * Linux clock API implementation
 */
int clk_enable(struct clk *clk)
{
        mutex_lock(&clocks_mutex);
        clk_enable_unlocked(clk);
        mutex_unlock(&clocks_mutex);
        return 0;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 30 | 100.00% | 1 | 100.00% |
Total | 30 | 100.00% | 1 | 100.00% |
EXPORT_SYMBOL(clk_enable);
void clk_disable(struct clk *clk)
{
        if (!clk)
                return;

        mutex_lock(&clocks_mutex);
        clk_disable_unlocked(clk);
        mutex_unlock(&clocks_mutex);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 27 | 81.82% | 1 | 50.00% |
Masahiro Yamada | 6 | 18.18% | 1 | 50.00% |
Total | 33 | 100.00% | 2 | 100.00% |
EXPORT_SYMBOL(clk_disable);
unsigned long clk_get_rate(struct clk *clk)
{
        if (!clk)
                return 0;

        return clk->rate;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 16 | 66.67% | 1 | 50.00% |
Jonas Gorski | 8 | 33.33% | 1 | 50.00% |
Total | 24 | 100.00% | 2 | 100.00% |
EXPORT_SYMBOL(clk_get_rate);
int clk_set_rate(struct clk *clk, unsigned long rate)
{
        return 0;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Markos Chandras | 17 | 100.00% | 1 | 100.00% |
Total | 17 | 100.00% | 1 | 100.00% |
EXPORT_SYMBOL_GPL(clk_set_rate);
long clk_round_rate(struct clk *clk, unsigned long rate)
{
        return 0;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Markos Chandras | 17 | 100.00% | 1 | 100.00% |
Total | 17 | 100.00% | 1 | 100.00% |
EXPORT_SYMBOL_GPL(clk_round_rate);
struct clk *clk_get(struct device *dev, const char *id)
{
        if (!strcmp(id, "enet0"))
                return &clk_enet0;
        if (!strcmp(id, "enet1"))
                return &clk_enet1;
        if (!strcmp(id, "enetsw"))
                return &clk_enetsw;
        if (!strcmp(id, "ephy"))
                return &clk_ephy;
        if (!strcmp(id, "usbh"))
                return &clk_usbh;
        if (!strcmp(id, "usbd"))
                return &clk_usbd;
        if (!strcmp(id, "spi"))
                return &clk_spi;
        if (!strcmp(id, "hsspi"))
                return &clk_hsspi;
        if (!strcmp(id, "xtm"))
                return &clk_xtm;
        if (!strcmp(id, "periph"))
                return &clk_periph;
        if ((BCMCPU_IS_3368() || BCMCPU_IS_6358()) && !strcmp(id, "pcm"))
                return &clk_pcm;
        if ((BCMCPU_IS_6362() || BCMCPU_IS_6368()) && !strcmp(id, "ipsec"))
                return &clk_ipsec;
        if ((BCMCPU_IS_6328() || BCMCPU_IS_6362()) && !strcmp(id, "pcie"))
                return &clk_pcie;

        return ERR_PTR(-ENOENT);
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 153 | 66.52% | 2 | 25.00% |
Jonas Gorski | 41 | 17.83% | 3 | 37.50% |
Florian Fainelli | 22 | 9.57% | 2 | 25.00% |
Kevin Cernekee | 14 | 6.09% | 1 | 12.50% |
Total | 230 | 100.00% | 8 | 100.00% |
EXPORT_SYMBOL(clk_get);
void clk_put(struct clk *clk)
{
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 9 | 100.00% | 1 | 100.00% |
Total | 9 | 100.00% | 1 | 100.00% |
EXPORT_SYMBOL(clk_put);
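Taken together, clk_get(), clk_enable(), clk_get_rate(), clk_disable() and clk_put() cover the usual driver-side clock lifecycle for the names recognised above. A hypothetical driver sketch, not part of this file (dev is whatever struct device the caller owns; error handling abbreviated):

        struct clk *usbh;

        usbh = clk_get(dev, "usbh");            /* looked up by name in clk_get() above */
        if (IS_ERR(usbh))
                return PTR_ERR(usbh);           /* -ENOENT for an unknown id */
        clk_enable(usbh);                       /* first enable sets the CKCTL_*_USBH_EN bit */
        /* ... use the USB host block ... */
        clk_disable(usbh);                      /* last disable clears the bit again */
        clk_put(usbh);                          /* a no-op here, kept for API symmetry */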
#define HSSPI_PLL_HZ_6328       133333333
#define HSSPI_PLL_HZ_6362       400000000

static int __init bcm63xx_clk_init(void)
{
        switch (bcm63xx_get_cpu_id()) {
        case BCM6328_CPU_ID:
                clk_hsspi.rate = HSSPI_PLL_HZ_6328;
                break;
        case BCM6362_CPU_ID:
                clk_hsspi.rate = HSSPI_PLL_HZ_6362;
                break;
        }

        return 0;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Jonas Gorski | 39 | 100.00% | 1 | 100.00% |
Total | 39 | 100.00% | 1 | 100.00% |
arch_initcall(bcm63xx_clk_init);
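Because bcm63xx_clk_init() runs as an arch_initcall, the "hsspi" clock already carries its SoC-specific PLL rate before ordinary driver initcalls run. An illustrative read-back, using only the values defined above (not part of this file):

        /* on a BCM6328 */
        clk_get_rate(clk_get(NULL, "hsspi"));   /* 133333333 (HSSPI_PLL_HZ_6328) */
        /* on a BCM6362 */
        clk_get_rate(clk_get(NULL, "hsspi"));   /* 400000000 (HSSPI_PLL_HZ_6362) */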
Overall Contributors
Person | Tokens | Prop | Commits | CommitProp |
Maxime Bizon | 920 | 62.16% | 2 | 11.11% |
Jonas Gorski | 335 | 22.64% | 8 | 44.44% |
Florian Fainelli | 92 | 6.22% | 4 | 22.22% |
Kevin Cernekee | 79 | 5.34% | 1 | 5.56% |
Markos Chandras | 44 | 2.97% | 1 | 5.56% |
Masahiro Yamada | 6 | 0.41% | 1 | 5.56% |
Paul Gortmaker | 4 | 0.27% | 1 | 5.56% |
Total | 1480 | 100.00% | 18 | 100.00% |