Release 4.10 arch/x86/include/asm/tsc.h
/*
* x86 TSC related functions
*/
#ifndef _ASM_X86_TSC_H
#define _ASM_X86_TSC_H
#include <asm/processor.h>
#define NS_SCALE 10 /* 2^10, carefully chosen */
#define US_SCALE 32 /* 2^32, arbitrarily chosen */
/*
* Standard way to access the cycle counter.
*/
typedef unsigned long long cycles_t;
extern unsigned int cpu_khz;
extern unsigned int tsc_khz;
extern void disable_TSC(void);
static inline cycles_t get_cycles(void)
{
#ifndef CONFIG_X86_TSC
	if (!boot_cpu_has(X86_FEATURE_TSC))
		return 0;
#endif
	return rdtsc();
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| andi kleen | 10 | 34.48% | 2 | 28.57% |
| andres salomon | 8 | 27.59% | 1 | 14.29% |
| glauber de oliveira costa | 5 | 17.24% | 1 | 14.29% |
| borislav petkov | 4 | 13.79% | 1 | 14.29% |
| andy lutomirski | 1 | 3.45% | 1 | 14.29% |
| ingo molnar | 1 | 3.45% | 1 | 14.29% |
| Total | 29 | 100.00% | 7 | 100.00% |
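As a usage sketch only (not part of this header), the fragment below times a code region with get_cycles() and converts the cycle delta to milliseconds via tsc_khz; the function name, the pr_info() reporting and the timed region are illustrative assumptions. Note that get_cycles() returns 0 on CPUs without a TSC, so the delta is only meaningful when the feature is present.

/* Sketch: timing a region with get_cycles(); pr_info() assumes <linux/printk.h>. */
static void example_time_region(void)
{
	cycles_t start, end;

	start = get_cycles();
	/* ... hypothetical work to be measured ... */
	end = get_cycles();

	/* tsc_khz is cycles per millisecond, so delta / tsc_khz approximates ms. */
	if (tsc_khz)
		pr_info("example region took ~%llu ms\n", (end - start) / tsc_khz);
}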
extern struct system_counterval_t convert_art_to_tsc(u64 art);
extern void tsc_init(void);
extern void mark_tsc_unstable(char *reason);
extern int unsynchronized_tsc(void);
extern int check_tsc_unstable(void);
extern unsigned long native_calibrate_cpu(void);
extern unsigned long native_calibrate_tsc(void);
extern unsigned long long native_sched_clock_from_tsc(u64 tsc);
extern int tsc_clocksource_reliable;
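The mark/check pair declared above is how other x86 code reports and queries TSC trustworthiness. Below is a hedged sketch of such callers: the function names and the policy are assumptions, while mark_tsc_unstable() and check_tsc_unstable() are exactly the declarations shown above.

/* Sketch only: a hypothetical platform quirk and a hypothetical consumer. */
static void example_firmware_quirk(void)
{
	/* Tell the core TSC code the counter can no longer be trusted. */
	mark_tsc_unstable("example: firmware adjusted the TSC");
}

static bool example_can_use_tsc_timestamps(void)
{
	/* A nonzero return from check_tsc_unstable() means "do not rely on it". */
	return !check_tsc_unstable();
}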
/*
* Boot-time check whether the TSCs are synchronized across
* all CPUs/cores:
*/
#ifdef CONFIG_X86_TSC
extern bool tsc_store_and_check_tsc_adjust(bool bootcpu);
extern void tsc_verify_tsc_adjust(bool resume);
extern void check_tsc_sync_source(int cpu);
extern void check_tsc_sync_target(void);
#else
static inline bool tsc_store_and_check_tsc_adjust(bool bootcpu) { return false; }
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| thomas gleixner | 13 | 100.00% | 4 | 100.00% |
| Total | 13 | 100.00% | 4 | 100.00% |
static inline void tsc_verify_tsc_adjust(bool resume) { }
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| thomas gleixner | 9 | 100.00% | 2 | 100.00% |
| Total | 9 | 100.00% | 2 | 100.00% |
static inline void check_tsc_sync_source(int cpu) { }
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| thomas gleixner | 9 | 100.00% | 1 | 100.00% |
| Total | 9 | 100.00% | 1 | 100.00% |
static inline void check_tsc_sync_target(void) { }
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| thomas gleixner | 8 | 100.00% | 1 | 100.00% |
| Total | 8 | 100.00% | 1 | 100.00% |
#endif
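Because the !CONFIG_X86_TSC branch above supplies empty inline stubs, callers can invoke the sync-check helpers unconditionally instead of wrapping every call site in #ifdefs. A minimal, hypothetical illustration:

/* Sketch only: the caller and its role in CPU bring-up are made up. */
static void example_bring_up_cpu(int cpu)
{
	/* Compiles to nothing when CONFIG_X86_TSC is not set. */
	check_tsc_sync_source(cpu);
}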
extern int notsc_setup(char *);
extern void tsc_save_sched_clock_state(void);
extern void tsc_restore_sched_clock_state(void);
unsigned long cpu_khz_from_msr(void);
#endif /* _ASM_X86_TSC_H */
Overall Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| thomas gleixner | 99 | 39.29% | 12 | 37.50% |
| andres salomon | 62 | 24.60% | 1 | 3.12% |
| andi kleen | 20 | 7.94% | 3 | 9.38% |
| suresh siddha | 16 | 6.35% | 2 | 6.25% |
| glauber de oliveira costa | 12 | 4.76% | 2 | 6.25% |
| len brown | 9 | 3.57% | 2 | 6.25% |
| christopher s. hall | 8 | 3.17% | 1 | 3.12% |
| rusty russell | 6 | 2.38% | 1 | 3.12% |
| john stultz | 5 | 1.98% | 2 | 6.25% |
| bin gao | 4 | 1.59% | 1 | 3.12% |
| borislav petkov | 4 | 1.59% | 1 | 3.12% |
| h. peter anvin | 3 | 1.19% | 1 | 3.12% |
| marcelo tosatti | 2 | 0.79% | 1 | 3.12% |
| andy lutomirski | 1 | 0.40% | 1 | 3.12% |
| ingo molnar | 1 | 0.40% | 1 | 3.12% |
| Total | 252 | 100.00% | 32 | 100.00% |