Author | Tokens | Token Proportion | Commits | Commit Proportion |
---|---|---|---|---|
Marc Zyngier | 789 | 80.84% | 2 | 18.18% |
Ilkka Koskinen | 86 | 8.81% | 1 | 9.09% |
Rob Herring | 68 | 6.97% | 3 | 27.27% |
Mark Rutland | 12 | 1.23% | 1 | 9.09% |
Reiji Watanabe | 7 | 0.72% | 1 | 9.09% |
Fuad Tabba | 6 | 0.61% | 1 | 9.09% |
Quentin Perret | 5 | 0.51% | 1 | 9.09% |
Geert Uytterhoeven | 3 | 0.31% | 1 | 9.09% |
Total | 976 | 11 |
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_PMUV3_H
#define __ASM_PMUV3_H

#include <asm/cp15.h>
#include <asm/cputype.h>

/*
 * AArch32 (CP15) accessors for the PMUv3 registers.
 *
 * Each define expands to a __ACCESS_CP15{,_64} descriptor
 * (coproc register: CRn, Op1, CRm, Op2) consumed by
 * read_sysreg()/write_sysreg().  PMCCNTR is the only 64-bit
 * (MRRC/MCRR) register here.
 */
#define PMCCNTR			__ACCESS_CP15_64(0, c9)
#define PMCR			__ACCESS_CP15(c9, 0, c12, 0)
#define PMCNTENSET		__ACCESS_CP15(c9, 0, c12, 1)
#define PMCNTENCLR		__ACCESS_CP15(c9, 0, c12, 2)
#define PMOVSR			__ACCESS_CP15(c9, 0, c12, 3)
#define PMSELR			__ACCESS_CP15(c9, 0, c12, 5)
#define PMCEID0			__ACCESS_CP15(c9, 0, c12, 6)
#define PMCEID1			__ACCESS_CP15(c9, 0, c12, 7)
#define PMXEVTYPER		__ACCESS_CP15(c9, 0, c13, 1)
#define PMXEVCNTR		__ACCESS_CP15(c9, 0, c13, 2)
#define PMUSERENR		__ACCESS_CP15(c9, 0, c14, 0)
#define PMINTENSET		__ACCESS_CP15(c9, 0, c14, 1)
#define PMINTENCLR		__ACCESS_CP15(c9, 0, c14, 2)
#define PMCEID2			__ACCESS_CP15(c9, 0, c14, 4)
#define PMCEID3			__ACCESS_CP15(c9, 0, c14, 5)
#define PMMIR			__ACCESS_CP15(c9, 0, c14, 6)
#define PMCCFILTR		__ACCESS_CP15(c14, 0, c15, 7)

/*
 * Per-event counter registers PMEVCNTR<n>.  The 31 counters are
 * spread across CRm = c8..c11 with Op2 = 0..7 within each CRm.
 */
#define PMEVCNTR0		__ACCESS_CP15(c14, 0, c8, 0)
#define PMEVCNTR1		__ACCESS_CP15(c14, 0, c8, 1)
#define PMEVCNTR2		__ACCESS_CP15(c14, 0, c8, 2)
#define PMEVCNTR3		__ACCESS_CP15(c14, 0, c8, 3)
#define PMEVCNTR4		__ACCESS_CP15(c14, 0, c8, 4)
#define PMEVCNTR5		__ACCESS_CP15(c14, 0, c8, 5)
#define PMEVCNTR6		__ACCESS_CP15(c14, 0, c8, 6)
#define PMEVCNTR7		__ACCESS_CP15(c14, 0, c8, 7)
#define PMEVCNTR8		__ACCESS_CP15(c14, 0, c9, 0)
#define PMEVCNTR9		__ACCESS_CP15(c14, 0, c9, 1)
#define PMEVCNTR10		__ACCESS_CP15(c14, 0, c9, 2)
#define PMEVCNTR11		__ACCESS_CP15(c14, 0, c9, 3)
#define PMEVCNTR12		__ACCESS_CP15(c14, 0, c9, 4)
#define PMEVCNTR13		__ACCESS_CP15(c14, 0, c9, 5)
#define PMEVCNTR14		__ACCESS_CP15(c14, 0, c9, 6)
#define PMEVCNTR15		__ACCESS_CP15(c14, 0, c9, 7)
#define PMEVCNTR16		__ACCESS_CP15(c14, 0, c10, 0)
#define PMEVCNTR17		__ACCESS_CP15(c14, 0, c10, 1)
#define PMEVCNTR18		__ACCESS_CP15(c14, 0, c10, 2)
#define PMEVCNTR19		__ACCESS_CP15(c14, 0, c10, 3)
#define PMEVCNTR20		__ACCESS_CP15(c14, 0, c10, 4)
#define PMEVCNTR21		__ACCESS_CP15(c14, 0, c10, 5)
#define PMEVCNTR22		__ACCESS_CP15(c14, 0, c10, 6)
#define PMEVCNTR23		__ACCESS_CP15(c14, 0, c10, 7)
#define PMEVCNTR24		__ACCESS_CP15(c14, 0, c11, 0)
#define PMEVCNTR25		__ACCESS_CP15(c14, 0, c11, 1)
#define PMEVCNTR26		__ACCESS_CP15(c14, 0, c11, 2)
#define PMEVCNTR27		__ACCESS_CP15(c14, 0, c11, 3)
#define PMEVCNTR28		__ACCESS_CP15(c14, 0, c11, 4)
#define PMEVCNTR29		__ACCESS_CP15(c14, 0, c11, 5)
#define PMEVCNTR30		__ACCESS_CP15(c14, 0, c11, 6)

/*
 * Per-event type registers PMEVTYPER<n>, laid out like PMEVCNTR<n>
 * but at CRm = c12..c15.
 */
#define PMEVTYPER0		__ACCESS_CP15(c14, 0, c12, 0)
#define PMEVTYPER1		__ACCESS_CP15(c14, 0, c12, 1)
#define PMEVTYPER2		__ACCESS_CP15(c14, 0, c12, 2)
#define PMEVTYPER3		__ACCESS_CP15(c14, 0, c12, 3)
#define PMEVTYPER4		__ACCESS_CP15(c14, 0, c12, 4)
#define PMEVTYPER5		__ACCESS_CP15(c14, 0, c12, 5)
#define PMEVTYPER6		__ACCESS_CP15(c14, 0, c12, 6)
#define PMEVTYPER7		__ACCESS_CP15(c14, 0, c12, 7)
#define PMEVTYPER8		__ACCESS_CP15(c14, 0, c13, 0)
#define PMEVTYPER9		__ACCESS_CP15(c14, 0, c13, 1)
#define PMEVTYPER10		__ACCESS_CP15(c14, 0, c13, 2)
#define PMEVTYPER11		__ACCESS_CP15(c14, 0, c13, 3)
#define PMEVTYPER12		__ACCESS_CP15(c14, 0, c13, 4)
#define PMEVTYPER13		__ACCESS_CP15(c14, 0, c13, 5)
#define PMEVTYPER14		__ACCESS_CP15(c14, 0, c13, 6)
#define PMEVTYPER15		__ACCESS_CP15(c14, 0, c13, 7)
#define PMEVTYPER16		__ACCESS_CP15(c14, 0, c14, 0)
#define PMEVTYPER17		__ACCESS_CP15(c14, 0, c14, 1)
#define PMEVTYPER18		__ACCESS_CP15(c14, 0, c14, 2)
#define PMEVTYPER19		__ACCESS_CP15(c14, 0, c14, 3)
#define PMEVTYPER20		__ACCESS_CP15(c14, 0, c14, 4)
#define PMEVTYPER21		__ACCESS_CP15(c14, 0, c14, 5)
#define PMEVTYPER22		__ACCESS_CP15(c14, 0, c14, 6)
#define PMEVTYPER23		__ACCESS_CP15(c14, 0, c14, 7)
#define PMEVTYPER24		__ACCESS_CP15(c14, 0, c15, 0)
#define PMEVTYPER25		__ACCESS_CP15(c14, 0, c15, 1)
#define PMEVTYPER26		__ACCESS_CP15(c14, 0, c15, 2)
#define PMEVTYPER27		__ACCESS_CP15(c14, 0, c15, 3)
#define PMEVTYPER28		__ACCESS_CP15(c14, 0, c15, 4)
#define PMEVTYPER29		__ACCESS_CP15(c14, 0, c15, 5)
#define PMEVTYPER30		__ACCESS_CP15(c14, 0, c15, 6)

/*
 * Case bodies for PMEVN_SWITCH(): token-paste the counter index n
 * onto the register name so each case accesses its own PMEVCNTRn /
 * PMEVTYPERn register.  PMEVN_SWITCH() itself is defined elsewhere
 * (shared PMU header) and expands one case per valid n.
 */
#define RETURN_READ_PMEVCNTRN(n) \
	return read_sysreg(PMEVCNTR##n)
static inline unsigned long read_pmevcntrn(int n)
{
	PMEVN_SWITCH(n, RETURN_READ_PMEVCNTRN);
	return 0;	/* unreachable for a valid n; keeps the compiler happy */
}

#define WRITE_PMEVCNTRN(n) \
	write_sysreg(val, PMEVCNTR##n)
static inline void write_pmevcntrn(int n, unsigned long val)
{
	PMEVN_SWITCH(n, WRITE_PMEVCNTRN);
}

#define WRITE_PMEVTYPERN(n) \
	write_sysreg(val, PMEVTYPER##n)
static inline void write_pmevtypern(int n, unsigned long val)
{
	PMEVN_SWITCH(n, WRITE_PMEVTYPERN);
}

static inline unsigned long read_pmmir(void)
{
	return read_sysreg(PMMIR);
}

/* Extract ID_DFR0.PerfMon (bits [27:24]); see version defines below. */
static inline u32 read_pmuver(void)
{
	/* PMUVers is not a signed field */
	u32 dfr0 = read_cpuid_ext(CPUID_EXT_DFR0);

	return (dfr0 >> 24) & 0xf;
}

static inline bool pmuv3_has_icntr(void)
{
	/* FEAT_PMUv3_ICNTR not accessible for 32-bit */
	return false;
}

static inline void write_pmcr(u32 val)
{
	write_sysreg(val, PMCR);
}

static inline u32 read_pmcr(void)
{
	return read_sysreg(PMCR);
}

static inline void write_pmselr(u32 val)
{
	write_sysreg(val, PMSELR);
}

static inline void write_pmccntr(u64 val)
{
	write_sysreg(val, PMCCNTR);
}

static inline u64 read_pmccntr(void)
{
	return read_sysreg(PMCCNTR);
}

/*
 * PMICNTR/PMICFILTR (fixed instruction counter) do not exist on
 * AArch32 — these stubs keep the interface shared with arm64.
 */
static inline void write_pmicntr(u64 val) {}

static inline u64 read_pmicntr(void)
{
	return 0;
}

static inline void write_pmcntenset(u32 val)
{
	write_sysreg(val, PMCNTENSET);
}

static inline void write_pmcntenclr(u32 val)
{
	write_sysreg(val, PMCNTENCLR);
}

static inline void write_pmintenset(u32 val)
{
	write_sysreg(val, PMINTENSET);
}

static inline void write_pmintenclr(u32 val)
{
	write_sysreg(val, PMINTENCLR);
}

static inline void write_pmccfiltr(u32 val)
{
	write_sysreg(val, PMCCFILTR);
}

static inline void write_pmicfiltr(u64 val) {}

static inline u64 read_pmicfiltr(void)
{
	return 0;
}

static inline void write_pmovsclr(u32 val)
{
	write_sysreg(val, PMOVSR);
}

static inline u32 read_pmovsclr(void)
{
	return read_sysreg(PMOVSR);
}

static inline void write_pmuserenr(u32 val)
{
	write_sysreg(val, PMUSERENR);
}

/* No PMUACR on AArch32 — stub for interface parity with arm64. */
static inline void write_pmuacr(u64 val) {}

/*
 * KVM PMU hooks: no-ops here, since there is no KVM host support on
 * 32-bit ARM.  The arm64 header provides real implementations.
 */
static inline void kvm_set_pmu_events(u32 set, struct perf_event_attr *attr) {}
static inline void kvm_clr_pmu_events(u32 clr) {}
static inline bool kvm_pmu_counter_deferred(struct perf_event_attr *attr)
{
	return false;
}

static inline bool kvm_set_pmuserenr(u64 val)
{
	return false;
}

static inline void kvm_vcpu_pmu_resync_el0(void) {}

/* PMU Version in DFR Register */
#define ARMV8_PMU_DFR_VER_NI		0
#define ARMV8_PMU_DFR_VER_V3P1		0x4
#define ARMV8_PMU_DFR_VER_V3P4		0x5
#define ARMV8_PMU_DFR_VER_V3P5		0x6
#define ARMV8_PMU_DFR_VER_V3P9		0x9
#define ARMV8_PMU_DFR_VER_IMP_DEF	0xF

/* PMUv3 is present unless PerfMon is "none" or IMPLEMENTATION DEFINED. */
static inline bool pmuv3_implemented(int pmuver)
{
	return !(pmuver == ARMV8_PMU_DFR_VER_IMP_DEF ||
		 pmuver == ARMV8_PMU_DFR_VER_NI);
}

static inline bool is_pmuv3p4(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P4;
}

static inline bool is_pmuv3p5(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P5;
}

static inline bool is_pmuv3p9(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P9;
}

/*
 * Common-event ID words: PMCEID2/3 (the high halves) only exist from
 * PMUv3p1 on, so gate the 64-bit composition on the detected version.
 */
static inline u64 read_pmceid0(void)
{
	u64 val = read_sysreg(PMCEID0);

	if (read_pmuver() >= ARMV8_PMU_DFR_VER_V3P1)
		val |= (u64)read_sysreg(PMCEID2) << 32;

	return val;
}

static inline u64 read_pmceid1(void)
{
	u64 val = read_sysreg(PMCEID1);

	if (read_pmuver() >= ARMV8_PMU_DFR_VER_V3P1)
		val |= (u64)read_sysreg(PMCEID3) << 32;

	return val;
}

#endif
Information contained on this website is provided for historical purposes only and does not indicate or represent copyright ownership.
Created with Cregit http://github.com/cregit/cregit
Version 2.0-RC1