/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

#include <linux/linkage.h>	/* for asmlinkage */

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

/* DMA buffers must be padded/aligned to a full cache line */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline_aligned data to L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned \
	__attribute__((__aligned__(L1_CACHE_BYTES), \
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif

/*
 * largest L1 which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5

/*
 * SMP without hardware cache coherency: advertise which cache(s) need
 * explicit software synchronization between cores, depending on what
 * memory regions are configured cacheable.
 */
#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif

#ifndef __ASSEMBLY__
/* Assembly helpers implementing the cross-core cache barrier protocol */
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);
/*
 * smp_mark_barrier() - record a cache-sync barrier point for other cores.
 *
 * Thin C wrapper around the asm helper declared above; used on SMP
 * configurations without hardware cache coherency.
 */
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
/*
 * NOTE(review): the tables below are git contribution statistics that were
 * appended to this header by an extraction tool; they are not part of the
 * original source.  Kept verbatim, but fenced in a comment so they do not
 * sit as bare tokens in a C header.
 *
 * Person | Tokens | Prop | Commits | CommitProp |
 * Graf Yang | 12 | 100.00% | 1 | 100.00% |
 * Total | 12 | 100.00% | 1 | 100.00% |
 * Person | Tokens | Prop | Commits | CommitProp |
 * Graf Yang | 12 | 100.00% | 1 | 100.00% |
 * Total | 12 | 100.00% | 1 | 100.00% |
 * Person | Tokens | Prop | Commits | CommitProp |
 * Graf Yang | 85 | 52.80% | 2 | 22.22% |
 * Bryan Wu | 37 | 22.98% | 1 | 11.11% |
 * Sonic Zhang | 20 | 12.42% | 1 | 11.11% |
 * Jie Zhang | 10 | 6.21% | 1 | 11.11% |
 * FUJITA Tomonori | 4 | 2.48% | 2 | 22.22% |
 * Mike Frysinger | 4 | 2.48% | 1 | 11.11% |
 * Robin Getz | 1 | 0.62% | 1 | 11.11% |
 * Total | 161 | 100.00% | 9 | 100.00% |
 */