Release 4.7 arch/powerpc/include/asm/cputime.h
/*
* Definitions for measuring cputime on powerpc machines.
*
* Copyright (C) 2006 Paul Mackerras, IBM Corp.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or (at your option) any later version.
*
* If we have CONFIG_VIRT_CPU_ACCOUNTING_NATIVE, we measure cpu time in
* the same units as the timebase. Otherwise we measure cpu time
* in jiffies using the generic definitions.
*/
#ifndef __POWERPC_CPUTIME_H
#define __POWERPC_CPUTIME_H
#ifndef CONFIG_VIRT_CPU_ACCOUNTING_NATIVE
#include <asm-generic/cputime.h>
#ifdef __KERNEL__
static inline void setup_cputime_one_jiffy(void) { }
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| stanislaw gruszka | 8 | 100.00% | 1 | 100.00% |
| Total | 8 | 100.00% | 1 | 100.00% |
#endif
#else
#include <linux/types.h>
#include <linux/time.h>
#include <asm/div64.h>
#include <asm/time.h>
#include <asm/param.h>
/*
 * cputime is stored in timebase-tick units.  The __nocast annotation
 * lets sparse flag accidental mixing with plain integers.
 */
typedef u64 __nocast cputime_t;
typedef u64 __nocast cputime64_t;
/* cputime values are plain u64 underneath, so cmpxchg works directly. */
#define cmpxchg_cputime(ptr, old, new) cmpxchg(ptr, old, new)
#ifdef __KERNEL__
/*
 * One jiffy in timebase units computed during initialization
 */
extern cputime_t cputime_one_jiffy;
/*
 * Convert cputime <-> jiffies
 */
extern u64 __cputime_jiffies_factor;
/* Per-cpu real/scaled deltas recorded at the last accounting update;
 * read by cputime_to_scaled() to estimate the scaled-to-real ratio. */
DECLARE_PER_CPU(unsigned long, cputime_last_delta);
DECLARE_PER_CPU(unsigned long, cputime_scaled_last_delta);
static inline unsigned long cputime_to_jiffies(const cputime_t ct)
{
return mulhdu((__force u64) ct, __cputime_jiffies_factor);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 20 | 83.33% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 4 | 16.67% | 1 | 50.00% |
| Total | 24 | 100.00% | 2 | 100.00% |
/*
 * Estimate the scaled cputime by scaling the real cputime according to
 * the most recently observed per-cpu scaled/real delta ratio.  Only
 * meaningful on cpus with the SPURR feature.
 */
static inline cputime_t cputime_to_scaled(const cputime_t ct)
{
	if (cpu_has_feature(CPU_FTR_SPURR)) {
		unsigned long real_delta = __this_cpu_read(cputime_last_delta);

		if (real_delta)
			return (__force u64) ct *
				__this_cpu_read(cputime_scaled_last_delta) /
				real_delta;
	}
	return ct;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| michael neuling | michael neuling | 36 | 83.72% | 1 | 33.33% |
| martin schwidefsky | martin schwidefsky | 4 | 9.30% | 1 | 33.33% |
| christoph lameter | christoph lameter | 3 | 6.98% | 1 | 33.33% |
| Total | 43 | 100.00% | 3 | 100.00% |
static inline cputime_t jiffies_to_cputime(const unsigned long jif)
{
u64 ct;
unsigned long sec;
/* have to be a little careful about overflow */
ct = jif % HZ;
sec = jif / HZ;
if (ct) {
ct *= tb_ticks_per_sec;
do_div(ct, HZ);
}
if (sec)
ct += (cputime_t) sec * tb_ticks_per_sec;
return (__force cputime_t) ct;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 64 | 92.75% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 5 | 7.25% | 1 | 50.00% |
| Total | 69 | 100.00% | 2 | 100.00% |
/* Cache the timebase-tick length of one jiffy (called during init). */
static inline void setup_cputime_one_jiffy(void)
{
	cputime_one_jiffy = jiffies_to_cputime(1);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| stanislaw gruszka | stanislaw gruszka | 16 | 100.00% | 1 | 100.00% |
| Total | 16 | 100.00% | 1 | 100.00% |
static inline cputime64_t jiffies64_to_cputime64(const u64 jif)
{
u64 ct;
u64 sec;
/* have to be a little careful about overflow */
ct = jif % HZ;
sec = jif / HZ;
if (ct) {
ct *= tb_ticks_per_sec;
do_div(ct, HZ);
}
if (sec)
ct += (u64) sec * tb_ticks_per_sec;
return (__force cputime64_t) ct;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| david woodhouse | david woodhouse | 61 | 91.04% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 6 | 8.96% | 1 | 50.00% |
| Total | 67 | 100.00% | 2 | 100.00% |
static inline u64 cputime64_to_jiffies64(const cputime_t ct)
{
return mulhdu((__force u64) ct, __cputime_jiffies_factor);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 19 | 82.61% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 4 | 17.39% | 1 | 50.00% |
| Total | 23 | 100.00% | 2 | 100.00% |
/*
 * Convert cputime <-> microseconds
 */
/* Scaling factor consumed via mulhdu() in cputime_to_usecs(). */
extern u64 __cputime_usec_factor;
static inline unsigned long cputime_to_usecs(const cputime_t ct)
{
return mulhdu((__force u64) ct, __cputime_usec_factor);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 18 | 75.00% | 1 | 25.00% |
| martin schwidefsky | martin schwidefsky | 4 | 16.67% | 1 | 25.00% |
| andreas schwab | andreas schwab | 1 | 4.17% | 1 | 25.00% |
| michael holzheu | michael holzheu | 1 | 4.17% | 1 | 25.00% |
| Total | 24 | 100.00% | 4 | 100.00% |
static inline cputime_t usecs_to_cputime(const unsigned long us)
{
u64 ct;
unsigned long sec;
/* have to be a little careful about overflow */
ct = us % 1000000;
sec = us / 1000000;
if (ct) {
ct *= tb_ticks_per_sec;
do_div(ct, 1000000);
}
if (sec)
ct += (cputime_t) sec * tb_ticks_per_sec;
return (__force cputime_t) ct;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 57 | 82.61% | 1 | 25.00% |
| michael holzheu | michael holzheu | 6 | 8.70% | 1 | 25.00% |
| martin schwidefsky | martin schwidefsky | 5 | 7.25% | 1 | 25.00% |
| andreas schwab | andreas schwab | 1 | 1.45% | 1 | 25.00% |
| Total | 69 | 100.00% | 4 | 100.00% |
#define usecs_to_cputime64(us) usecs_to_cputime(us)
/*
 * Convert cputime <-> seconds
 */
/* Scaling factor consumed via mulhdu() in cputime_to_secs(). */
extern u64 __cputime_sec_factor;
static inline unsigned long cputime_to_secs(const cputime_t ct)
{
return mulhdu((__force u64) ct, __cputime_sec_factor);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 20 | 83.33% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 4 | 16.67% | 1 | 50.00% |
| Total | 24 | 100.00% | 2 | 100.00% |
static inline cputime_t secs_to_cputime(const unsigned long sec)
{
return (__force cputime_t)((u64) sec * tb_ticks_per_sec);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 19 | 73.08% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 7 | 26.92% | 1 | 50.00% |
| Total | 26 | 100.00% | 2 | 100.00% |
/*
* Convert cputime <-> timespec
*/
static inline void cputime_to_timespec(const cputime_t ct, struct timespec *p)
{
u64 x = (__force u64) ct;
unsigned int frac;
frac = do_div(x, tb_ticks_per_sec);
p->tv_sec = x;
x = (u64) frac * 1000000000;
do_div(x, tb_ticks_per_sec);
p->tv_nsec = x;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 62 | 93.94% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 4 | 6.06% | 1 | 50.00% |
| Total | 66 | 100.00% | 2 | 100.00% |
static inline cputime_t timespec_to_cputime(const struct timespec *p)
{
u64 ct;
ct = (u64) p->tv_nsec * tb_ticks_per_sec;
do_div(ct, 1000000000);
return (__force cputime_t)(ct + (u64) p->tv_sec * tb_ticks_per_sec);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 45 | 86.54% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 7 | 13.46% | 1 | 50.00% |
| Total | 52 | 100.00% | 2 | 100.00% |
/*
* Convert cputime <-> timeval
*/
static inline void cputime_to_timeval(const cputime_t ct, struct timeval *p)
{
u64 x = (__force u64) ct;
unsigned int frac;
frac = do_div(x, tb_ticks_per_sec);
p->tv_sec = x;
x = (u64) frac * 1000000;
do_div(x, tb_ticks_per_sec);
p->tv_usec = x;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 62 | 93.94% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 4 | 6.06% | 1 | 50.00% |
| Total | 66 | 100.00% | 2 | 100.00% |
static inline cputime_t timeval_to_cputime(const struct timeval *p)
{
u64 ct;
ct = (u64) p->tv_usec * tb_ticks_per_sec;
do_div(ct, 1000000);
return (__force cputime_t)(ct + (u64) p->tv_sec * tb_ticks_per_sec);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 45 | 86.54% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 7 | 13.46% | 1 | 50.00% |
| Total | 52 | 100.00% | 2 | 100.00% |
/*
 * Convert cputime <-> clock_t (units of 1/USER_HZ seconds)
 */
/* Scaling factor consumed via mulhdu() in cputime_to_clock_t(). */
extern u64 __cputime_clockt_factor;
static inline unsigned long cputime_to_clock_t(const cputime_t ct)
{
return mulhdu((__force u64) ct, __cputime_clockt_factor);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 20 | 83.33% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 4 | 16.67% | 1 | 50.00% |
| Total | 24 | 100.00% | 2 | 100.00% |
static inline cputime_t clock_t_to_cputime(const unsigned long clk)
{
u64 ct;
unsigned long sec;
/* have to be a little careful about overflow */
ct = clk % USER_HZ;
sec = clk / USER_HZ;
if (ct) {
ct *= tb_ticks_per_sec;
do_div(ct, USER_HZ);
}
if (sec)
ct += (u64) sec * tb_ticks_per_sec;
return (__force cputime_t) ct;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | paul mackerras | 63 | 91.30% | 1 | 50.00% |
| martin schwidefsky | martin schwidefsky | 6 | 8.70% | 1 | 50.00% |
| Total | 69 | 100.00% | 2 | 100.00% |
/* The 64-bit clock_t conversion reuses the 32-bit implementation. */
#define cputime64_to_clock_t(ct) cputime_to_clock_t((cputime_t)(ct))
/* No extra per-task accounting state to switch on this architecture. */
static inline void arch_vtime_task_switch(struct task_struct *tsk) { }
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| frederic weisbecker | frederic weisbecker | 11 | 100.00% | 1 | 100.00% |
| Total | 11 | 100.00% | 1 | 100.00% |
#endif /* __KERNEL__ */
#endif /* CONFIG_VIRT_CPU_ACCOUNTING_NATIVE */
#endif /* __POWERPC_CPUTIME_H */
Overall Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| paul mackerras | 585 | 68.26% | 1 | 7.14% |
| martin schwidefsky | 77 | 8.98% | 1 | 7.14% |
| david woodhouse | 61 | 7.12% | 1 | 7.14% |
| michael neuling | 51 | 5.95% | 1 | 7.14% |
| stanislaw gruszka | 34 | 3.97% | 1 | 7.14% |
| frederic weisbecker | 14 | 1.63% | 2 | 14.29% |
| rik van riel | 11 | 1.28% | 1 | 7.14% |
| andreas schwab | 10 | 1.17% | 2 | 14.29% |
| michael holzheu | 8 | 0.93% | 1 | 7.14% |
| christoph lameter | 3 | 0.35% | 1 | 7.14% |
| bartlomiej zolnierkiewicz | 2 | 0.23% | 1 | 7.14% |
| stephen rothwell | 1 | 0.12% | 1 | 7.14% |
| Total | 857 | 100.00% | 14 | 100.00% |
Information contained on this website is for historical information purposes only and does not indicate or represent copyright ownership.