Release 4.7 arch/s390/include/asm/atomic.h
/*
* Copyright IBM Corp. 1999, 2009
* Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
* Denis Joseph Barrow,
* Arnd Bergmann <arndb@de.ibm.com>,
*
* Atomic operations that C can't guarantee us.
* Useful for resource counting etc.
* s390 uses 'Compare And Swap' for atomicity in SMP environment.
*
*/
#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>
#define ATOMIC_INIT(i) { (i) }
#define __ATOMIC_NO_BARRIER "\n"
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
#define __ATOMIC_OR "lao"
#define __ATOMIC_AND "lan"
#define __ATOMIC_ADD "laa"
#define __ATOMIC_XOR "lax"
#define __ATOMIC_BARRIER "bcr 14,0\n"
#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier) \
({ \
int old_val; \
\
typecheck(atomic_t *, ptr); \
asm volatile( \
op_string " %0,%2,%1\n" \
__barrier \
: "=d" (old_val), "+Q" ((ptr)->counter) \
: "d" (op_val) \
: "cc", "memory"); \
old_val; \
})
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
#define __ATOMIC_OR "or"
#define __ATOMIC_AND "nr"
#define __ATOMIC_ADD "ar"
#define __ATOMIC_XOR "xr"
#define __ATOMIC_BARRIER "\n"
#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier) \
({ \
int old_val, new_val; \
\
typecheck(atomic_t *, ptr); \
asm volatile( \
" l %0,%2\n" \
"0: lr %1,%0\n" \
op_string " %1,%3\n" \
" cs %0,%1,%2\n" \
" jl 0b" \
: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
: "d" (op_val) \
: "cc", "memory"); \
old_val; \
})
#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
/*
 * atomic_read - read the 32-bit counter with a single load instruction.
 * The volatile asm with a "Q" memory operand forces a real memory
 * access on every call instead of letting the compiler cache the value.
 */
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		" l %0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | 17 | 80.95% | 2 | 66.67% |
| pre-git | 4 | 19.05% | 1 | 33.33% |
| Total | 21 | 100.00% | 3 | 100.00% |
/*
 * atomic_set - set the 32-bit counter with a single store instruction.
 * Like atomic_read(), the volatile asm guarantees an actual store.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		" st %1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | 11 | 64.71% | 2 | 66.67% |
| pre-git | 6 | 35.29% | 1 | 33.33% |
| Total | 17 | 100.00% | 3 | 100.00% |
/*
 * atomic_add_return - atomically add i to v and return the NEW value.
 * __ATOMIC_LOOP yields the old value, hence the "+ i"; passing
 * __ATOMIC_BARRIER makes this the fully ordered variant.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_BARRIER) + i;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| pre-git | pre-git | 14 | 50.00% | 1 | 16.67% |
| heiko carstens | heiko carstens | 7 | 25.00% | 3 | 50.00% |
| linus torvalds | linus torvalds | 6 | 21.43% | 1 | 16.67% |
| martin schwidefsky | martin schwidefsky | 1 | 3.57% | 1 | 16.67% |
| Total | 28 | 100.00% | 6 | 100.00% |
/*
 * atomic_add - atomically add i to v; no return value needed, so the
 * cheaper no-barrier loop variant is used.
 */
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	/*
	 * Fast path: "add signed immediate" takes a signed 8-bit
	 * immediate, hence the compile-time range check for -128..127.
	 * Avoids loading the addend into a register.
	 */
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"asi %0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_NO_BARRIER);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | heiko carstens | 52 | 92.86% | 2 | 50.00% |
| pre-git | pre-git | 3 | 5.36% | 1 | 25.00% |
| martin schwidefsky | martin schwidefsky | 1 | 1.79% | 1 | 25.00% |
| Total | 56 | 100.00% | 4 | 100.00% |
/* Derived 32-bit helpers, all built on atomic_add()/atomic_add_return(). */
#define atomic_add_negative(_i, _v) (atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v) atomic_add(1, _v)
#define atomic_inc_return(_v) atomic_add_return(1, _v)
#define atomic_inc_and_test(_v) (atomic_add_return(1, _v) == 0)
/* Subtraction is addition of the negated (and int-cast) operand. */
#define atomic_sub(_i, _v) atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v) atomic_add_return(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v) (atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v) atomic_sub(1, _v)
#define atomic_dec_return(_v) atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v) (atomic_sub_return(1, _v) == 0)
/*
 * Generate atomic_and()/atomic_or()/atomic_xor().  These return no
 * value, so the no-barrier loop variant is used.
 */
#define ATOMIC_OP(op, OP)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__ATOMIC_LOOP(v, i, __ATOMIC_##OP, __ATOMIC_NO_BARRIER);	\
}

ATOMIC_OP(and, AND)
ATOMIC_OP(or, OR)
ATOMIC_OP(xor, XOR)

#undef ATOMIC_OP

/* Exchange delegates to the generic xchg() on the counter field. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/*
 * atomic_cmpxchg - if v->counter == old, store new into it.
 * Returns the value the counter held before the operation (equal to
 * old exactly when the swap succeeded).  The "+d" constraint lets the
 * cs instruction deposit the current counter value into 'old' on
 * mismatch, which is what gets returned.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		" cs %0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| martin schwidefsky | martin schwidefsky | 11 | 47.83% | 3 | 50.00% |
| pre-git | pre-git | 10 | 43.48% | 1 | 16.67% |
| heiko carstens | heiko carstens | 2 | 8.70% | 2 | 33.33% |
| Total | 23 | 100.00% | 6 | 100.00% |
/*
 * __atomic_add_unless - add a to v unless the counter equals u.
 * Returns the counter value observed before any modification; a result
 * different from u means the addition took place.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int seen = atomic_read(v);

	while (seen != u) {
		int prev = atomic_cmpxchg(v, seen, seen + a);

		if (prev == seen)
			break;
		/* Lost the race; retry with the freshly observed value. */
		seen = prev;
	}
	return seen;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| martin schwidefsky | martin schwidefsky | 33 | 43.42% | 1 | 16.67% |
| nick piggin | nick piggin | 23 | 30.26% | 1 | 16.67% |
| pre-git | pre-git | 9 | 11.84% | 1 | 16.67% |
| linus torvalds | linus torvalds | 9 | 11.84% | 1 | 16.67% |
| arun sharma | arun sharma | 1 | 1.32% | 1 | 16.67% |
| heiko carstens | heiko carstens | 1 | 1.32% | 1 | 16.67% |
| Total | 76 | 100.00% | 6 | 100.00% |
#undef __ATOMIC_LOOP
#define ATOMIC64_INIT(i) { (i) }
#define __ATOMIC64_NO_BARRIER "\n"
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
#define __ATOMIC64_OR "laog"
#define __ATOMIC64_AND "lang"
#define __ATOMIC64_ADD "laag"
#define __ATOMIC64_XOR "laxg"
#define __ATOMIC64_BARRIER "bcr 14,0\n"
#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier) \
({ \
long long old_val; \
\
typecheck(atomic64_t *, ptr); \
asm volatile( \
op_string " %0,%2,%1\n" \
__barrier \
: "=d" (old_val), "+Q" ((ptr)->counter) \
: "d" (op_val) \
: "cc", "memory"); \
old_val; \
})
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
#define __ATOMIC64_OR "ogr"
#define __ATOMIC64_AND "ngr"
#define __ATOMIC64_ADD "agr"
#define __ATOMIC64_XOR "xgr"
#define __ATOMIC64_BARRIER "\n"
#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier) \
({ \
long long old_val, new_val; \
\
typecheck(atomic64_t *, ptr); \
asm volatile( \
" lg %0,%2\n" \
"0: lgr %1,%0\n" \
op_string " %1,%3\n" \
" csg %0,%1,%2\n" \
" jl 0b" \
: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
: "d" (op_val) \
: "cc", "memory"); \
old_val; \
})
#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
/*
 * atomic64_read - read the 64-bit counter with a single lg load.
 * The volatile asm with a "Q" memory operand forces a real memory
 * access on every call instead of letting the compiler cache the value.
 */
static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		" lg %0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | heiko carstens | 19 | 82.61% | 2 | 66.67% |
| martin schwidefsky | martin schwidefsky | 4 | 17.39% | 1 | 33.33% |
| Total | 23 | 100.00% | 3 | 100.00% |
/*
 * atomic64_set - set the 64-bit counter with a single stg store.
 * Like atomic64_read(), the volatile asm guarantees an actual store.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		" stg %1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | heiko carstens | 12 | 66.67% | 2 | 50.00% |
| martin schwidefsky | martin schwidefsky | 6 | 33.33% | 2 | 50.00% |
| Total | 18 | 100.00% | 4 | 100.00% |
/*
 * atomic64_add_return - atomically add i to v and return the NEW value.
 * __ATOMIC64_LOOP yields the old value, hence the "+ i"; passing
 * __ATOMIC64_BARRIER makes this the fully ordered variant.
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_BARRIER) + i;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| martin schwidefsky | martin schwidefsky | 22 | 73.33% | 2 | 33.33% |
| heiko carstens | heiko carstens | 7 | 23.33% | 3 | 50.00% |
| pre-git | pre-git | 1 | 3.33% | 1 | 16.67% |
| Total | 30 | 100.00% | 6 | 100.00% |
/*
 * atomic64_add - atomically add i to v; no return value needed, so the
 * cheaper no-barrier loop variant is used.
 */
static inline void atomic64_add(long long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	/*
	 * Fast path: "add signed immediate (64)" takes a signed 8-bit
	 * immediate, hence the compile-time range check for -128..127.
	 * Avoids loading the addend into a register.
	 */
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"agsi %0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_NO_BARRIER);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | heiko carstens | 57 | 100.00% | 1 | 100.00% |
| Total | 57 | 100.00% | 1 | 100.00% |
/* Exchange delegates to the generic xchg() on the counter field. */
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * atomic64_cmpxchg - if v->counter == old, store new into it.
 * Returns the value the counter held before the operation (equal to
 * old exactly when the swap succeeded).  The "+d" constraint lets the
 * csg instruction deposit the current counter value into 'old' on
 * mismatch, which is what gets returned.
 */
static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	asm volatile(
		" csg %0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| martin schwidefsky | martin schwidefsky | 17 | 65.38% | 3 | 50.00% |
| pre-git | pre-git | 7 | 26.92% | 1 | 16.67% |
| heiko carstens | heiko carstens | 2 | 7.69% | 2 | 33.33% |
| Total | 26 | 100.00% | 6 | 100.00% |
/*
 * Generate atomic64_and()/atomic64_or()/atomic64_xor().  These return
 * no value, so the no-barrier loop variant is used.
 *
 * The operand type is long long to match the rest of the atomic64
 * interface (atomic64_add, atomic64_add_return, atomic64_cmpxchg all
 * take long long); the previous 'long' declaration was inconsistent.
 * On s390x both types are 64 bits wide, so this is ABI-compatible.
 */
#define ATOMIC64_OP(op, OP)						\
static inline void atomic64_##op(long long i, atomic64_t *v)		\
{									\
	__ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER);	\
}

ATOMIC64_OP(and, AND)
ATOMIC64_OP(or, OR)
ATOMIC64_OP(xor, XOR)

#undef ATOMIC64_OP
#undef __ATOMIC64_LOOP
/*
 * atomic64_add_unless - add i to v unless the counter equals u.
 * Returns nonzero if the addition was performed, zero if the counter
 * already held u.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long i, long long u)
{
	long long seen = atomic64_read(v);

	while (seen != u) {
		long long prev = atomic64_cmpxchg(v, seen, seen + i);

		if (prev == seen)
			break;
		/* Lost the race; retry with the freshly observed value. */
		seen = prev;
	}
	return seen != u;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | heiko carstens | 52 | 64.20% | 3 | 50.00% |
| martin schwidefsky | martin schwidefsky | 18 | 22.22% | 1 | 16.67% |
| nick piggin | nick piggin | 11 | 13.58% | 2 | 33.33% |
| Total | 81 | 100.00% | 6 | 100.00% |
/*
 * atomic64_dec_if_positive - decrement v unless the result would be
 * negative.  Returns the decremented value; a negative return means
 * the counter was left unmodified.
 */
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long seen, prev, next;

	for (seen = atomic64_read(v); ; seen = prev) {
		next = seen - 1;
		if (next < 0)
			break;
		prev = atomic64_cmpxchg(v, seen, next);
		if (prev == seen)
			break;
		/* Lost the race; loop update retries with prev. */
	}
	return next;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | heiko carstens | 80 | 100.00% | 1 | 100.00% |
| Total | 80 | 100.00% | 1 | 100.00% |
/* Derived 64-bit helpers, all built on atomic64_add()/atomic64_add_return(). */
#define atomic64_add_negative(_i, _v) (atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v) atomic64_add(1, _v)
#define atomic64_inc_return(_v) atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v) (atomic64_add_return(1, _v) == 0)
/* Subtraction is addition of the negated (long long-cast) operand. */
#define atomic64_sub_return(_i, _v) atomic64_add_return(-(long long)(_i), _v)
#define atomic64_sub(_i, _v) atomic64_add(-(long long)(_i), _v)
#define atomic64_sub_and_test(_i, _v) (atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v) atomic64_sub(1, _v)
#define atomic64_dec_return(_v) atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v) (atomic64_sub_return(1, _v) == 0)
/* Nonzero result means the increment happened (counter was not zero). */
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif /* __ARCH_S390_ATOMIC__ */
Overall Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| heiko carstens | 543 | 55.24% | 11 | 40.74% |
| martin schwidefsky | 197 | 20.04% | 5 | 18.52% |
| peter zijlstra | 78 | 7.93% | 1 | 3.70% |
| pre-git | 76 | 7.73% | 1 | 3.70% |
| nick piggin | 39 | 3.97% | 2 | 7.41% |
| linus torvalds | 24 | 2.44% | 1 | 3.70% |
| mathieu desnoyers | 9 | 0.92% | 1 | 3.70% |
| ingo molnar | 9 | 0.92% | 1 | 3.70% |
| matthew wilcox | 3 | 0.31% | 1 | 3.70% |
| dave jones | 3 | 0.31% | 1 | 3.70% |
| david howells | 1 | 0.10% | 1 | 3.70% |
| arun sharma | 1 | 0.10% | 1 | 3.70% |
| Total | 983 | 100.00% | 27 | 100.00% |
Information contained on this website is for historical information purposes only and does not indicate or represent copyright ownership.