arch/mips/alchemy/devboards/bcsr.c (Linux release 4.14)
/*
 * bcsr.c -- Db1xxx/Pb1xxx Devboard CPLD registers ("BCSR") abstraction.
 *
 * All Alchemy development boards (except, of course, the weird PB1000)
 * have a few registers in a CPLD with standardised layout; they mostly
 * only differ in base address.
 * All registers are 16 bits wide with 32-bit spacing.
 */
#include <linux/interrupt.h>
#include <linux/irqchip/chained_irq.h>
#include <linux/init.h>
#include <linux/export.h>
#include <linux/spinlock.h>
#include <linux/irq.h>
#include <asm/addrspace.h>
#include <asm/io.h>
#include <asm/mach-db1x00/bcsr.h>

static struct bcsr_reg {
        void __iomem *raddr;
        spinlock_t lock;
} bcsr_regs[BCSR_CNT];

static void __iomem *bcsr_virt;         /* KSEG1 addr of BCSR base */
static int bcsr_csc_base;               /* linux-irq of first cascaded irq */

void __init bcsr_init(unsigned long bcsr1_phys, unsigned long bcsr2_phys)
{
        int i;

        bcsr1_phys = KSEG1ADDR(CPHYSADDR(bcsr1_phys));
        bcsr2_phys = KSEG1ADDR(CPHYSADDR(bcsr2_phys));

        bcsr_virt = (void __iomem *)bcsr1_phys;

        for (i = 0; i < BCSR_CNT; i++) {
                if (i >= BCSR_HEXLEDS)
                        bcsr_regs[i].raddr = (void __iomem *)bcsr2_phys +
                                        (0x04 * (i - BCSR_HEXLEDS));
                else
                        bcsr_regs[i].raddr = (void __iomem *)bcsr1_phys +
                                        (0x04 * i);

                spin_lock_init(&bcsr_regs[i].lock);
        }
}
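
As a rough usage sketch (not part of this file): a DB1200-style board setup would call bcsr_init() early with the physical addresses of the two CPLD register blocks. The DB1200_BCSR_PHYS_ADDR and DB1200_BCSR_HEXLED_OFS names are assumed to come from the board headers and are illustrative only.

/*
 * Illustrative sketch, assuming DB1200_BCSR_* constants from the
 * board headers; the second block sits at a fixed offset above the first.
 */
static void __init my_board_setup(void)
{
        bcsr_init(DB1200_BCSR_PHYS_ADDR,
                  DB1200_BCSR_PHYS_ADDR + DB1200_BCSR_HEXLED_OFS);
}
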
unsigned short bcsr_read(enum bcsr_id reg)
{
        unsigned short r;
        unsigned long flags;

        spin_lock_irqsave(&bcsr_regs[reg].lock, flags);
        r = __raw_readw(bcsr_regs[reg].raddr);
        spin_unlock_irqrestore(&bcsr_regs[reg].lock, flags);
        return r;
}
EXPORT_SYMBOL_GPL(bcsr_read);

void bcsr_write(enum bcsr_id reg, unsigned short val)
{
        unsigned long flags;

        spin_lock_irqsave(&bcsr_regs[reg].lock, flags);
        __raw_writew(val, bcsr_regs[reg].raddr);
        wmb();
        spin_unlock_irqrestore(&bcsr_regs[reg].lock, flags);
}
EXPORT_SYMBOL_GPL(bcsr_write);

void bcsr_mod(enum bcsr_id reg, unsigned short clr, unsigned short set)
{
        unsigned short r;
        unsigned long flags;

        spin_lock_irqsave(&bcsr_regs[reg].lock, flags);
        r = __raw_readw(bcsr_regs[reg].raddr);
        r &= ~clr;
        r |= set;
        __raw_writew(r, bcsr_regs[reg].raddr);
        wmb();
        spin_unlock_irqrestore(&bcsr_regs[reg].lock, flags);
}
EXPORT_SYMBOL_GPL(bcsr_mod);
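
These three exported accessors are the intended interface for board code and drivers: each takes the per-register spinlock, so callers never touch the BCSR MMIO addresses directly. A hedged example follows; BCSR_RESETS is one of the enum bcsr_id register indices used throughout the Alchemy board files, while EXAMPLE_PHY_RST is a purely hypothetical bit name defined here for illustration.

/*
 * Illustrative sketch only: pulse a reset bit through the locked
 * read-modify-write accessor. Needs <linux/delay.h> for msleep().
 */
#define EXAMPLE_PHY_RST         (1 << 0)        /* hypothetical bit */

static void example_phy_reset(void)
{
        bcsr_mod(BCSR_RESETS, EXAMPLE_PHY_RST, 0);      /* assert reset */
        msleep(20);
        bcsr_mod(BCSR_RESETS, 0, EXAMPLE_PHY_RST);      /* release reset */
}
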

/*
 * DB1200/PB1200 CPLD IRQ muxer
 */
static void bcsr_csc_handler(struct irq_desc *d)
{
        unsigned short bisr = __raw_readw(bcsr_virt + BCSR_REG_INTSTAT);
        struct irq_chip *chip = irq_desc_get_chip(d);

        chained_irq_enter(chip, d);
        /* demux: __ffs() selects the lowest pending CPLD source */
        generic_handle_irq(bcsr_csc_base + __ffs(bisr));
        chained_irq_exit(chip, d);
}
static void bcsr_irq_mask(struct irq_data *d)
{
        unsigned short v = 1 << (d->irq - bcsr_csc_base);

        __raw_writew(v, bcsr_virt + BCSR_REG_MASKCLR);
        wmb();
}
static void bcsr_irq_maskack(struct irq_data *d)
{
        unsigned short v = 1 << (d->irq - bcsr_csc_base);

        __raw_writew(v, bcsr_virt + BCSR_REG_MASKCLR);
        __raw_writew(v, bcsr_virt + BCSR_REG_INTSTAT);  /* ack */
        wmb();
}
static void bcsr_irq_unmask(struct irq_data *d)
{
        unsigned short v = 1 << (d->irq - bcsr_csc_base);

        __raw_writew(v, bcsr_virt + BCSR_REG_MASKSET);
        wmb();
}
static struct irq_chip bcsr_irq_type = {
        .name           = "CPLD",
        .irq_mask       = bcsr_irq_mask,
        .irq_mask_ack   = bcsr_irq_maskack,
        .irq_unmask     = bcsr_irq_unmask,
};

void __init bcsr_init_irq(int csc_start, int csc_end, int hook_irq)
{
        unsigned int irq;

        /* mask & enable & ack all */
        __raw_writew(0xffff, bcsr_virt + BCSR_REG_MASKCLR);
        __raw_writew(0xffff, bcsr_virt + BCSR_REG_INTSET);
        __raw_writew(0xffff, bcsr_virt + BCSR_REG_INTSTAT);
        wmb();

        bcsr_csc_base = csc_start;

        for (irq = csc_start; irq <= csc_end; irq++)
                irq_set_chip_and_handler_name(irq, &bcsr_irq_type,
                                              handle_level_irq, "level");

        irq_set_chained_handler(hook_irq, bcsr_csc_handler);
}
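
A hedged wiring sketch: a board would typically call bcsr_init_irq() with its reserved range of Linux IRQ numbers and the Alchemy GPIO interrupt the CPLD output is cabled to; afterwards the demuxed sources behave like ordinary Linux IRQs. The DB1200_INT_BEGIN/DB1200_INT_END, AU1200_GPIO7_INT and DB1200_SD0_INSERT_INT names are assumed from the board headers, and the card-detect handler is purely illustrative.

/*
 * Illustrative sketch only: hook the CPLD cascade, then request one of
 * the demuxed interrupts as a normal Linux IRQ. The *_INT constants are
 * assumed to come from the board headers.
 */
static irqreturn_t example_cd_irq(int irq, void *data)
{
        return IRQ_HANDLED;
}

static void __init my_board_irq_setup(void)
{
        bcsr_init_irq(DB1200_INT_BEGIN, DB1200_INT_END, AU1200_GPIO7_INT);

        if (request_irq(DB1200_SD0_INSERT_INT, example_cd_irq, 0,
                        "sd0-insert", NULL))
                pr_warn("could not request example IRQ\n");
}
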
Overall Contributors

Person          | Tokens | Token share | Commits | Commit share
Manuel Lauss    |    637 |      92.19% |       4 |       44.44%
Thomas Gleixner |     47 |       6.80% |       3 |       33.33%
Paul Gortmaker  |      4 |       0.58% |       1 |       11.11%
David Howells   |      3 |       0.43% |       1 |       11.11%
Total           |    691 |     100.00% |       9 |      100.00%