/* SPDX-License-Identifier: GPL-2.0 */

/*
 * L1 cache line geometry for User-Mode Linux (UML).
 *
 * Defines L1_CACHE_SHIFT (log2 of the cache line size) based on the host
 * architecture UML is built for, and derives L1_CACHE_BYTES from it.
 * These values mainly control structure padding/alignment (e.g. for SMP
 * per-CPU data), so a conservative guess is acceptable on unknown hosts.
 */
#ifndef __UM_CACHE_H
#define __UM_CACHE_H

#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
/* 32-bit x86 host: reuse the value chosen by the x86 Kconfig. */
# define L1_CACHE_SHIFT		(CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
/* 64-bit x86 host: 64-byte lines. */
# define L1_CACHE_SHIFT		6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT		5
#endif

/* Cache line size in bytes, derived from the shift above. */
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#endif /* __UM_CACHE_H */
Overall Contributors
Person | Tokens | Prop | Commits | CommitProp |
Paolo 'Blaisorblade' Giarrusso | 36 | 72.00% | 1 | 33.33% |
Jeff Dike | 13 | 26.00% | 1 | 33.33% |
Greg Kroah-Hartman | 1 | 2.00% | 1 | 33.33% |
Total | 50 | 100.00% | 3 | 100.00% |