Contributors: 5

| Author | Tokens | Token Proportion | Commits | Commit Proportion |
| --- | --- | --- | --- | --- |
| Akinobu Mita | 215 | 63.80% | 1 | 20.00% |
| Will Deacon | 110 | 32.64% | 1 | 20.00% |
| Jiri Slaby | 9 | 2.67% | 1 | 20.00% |
| Arnd Bergmann | 2 | 0.59% | 1 | 20.00% |
| Greg Kroah-Hartman | 1 | 0.30% | 1 | 20.00% |
| Total | 337 | | 5 | |
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_ATOMIC_H_

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Implementation of atomic bitops using atomic-fetch ops.
 * See Documentation/atomic_bitops.txt for details.
 */
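
/*
 * Note on the addressing used throughout this file: each operation
 * locates its target bit in two steps. BIT_WORD(nr), defined as
 * nr / BITS_PER_LONG, selects the word within the bitmap, and
 * BIT_MASK(nr), defined as 1UL << (nr % BITS_PER_LONG), isolates the
 * bit within that word. For example, with 64-bit longs, nr = 70
 * resolves to word 1 and mask 1UL << 6.
 */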
static inline void set_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
}

static inline void clear_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
}

static inline void change_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
}
static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Fast path: skip the atomic RMW (and its cacheline write) if the
	 * bit is already set. Note this early return does not provide the
	 * full memory barrier of the atomic op. */
	if (READ_ONCE(*p) & mask)
		return 1;

	old = atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Fast path: skip the atomic RMW (and its cacheline write) if the
	 * bit is already clear, with the same ordering caveat as above. */
	if (!(READ_ONCE(*p) & mask))
		return 0;

	old = atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
static inline int test_and_change_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = atomic_long_fetch_xor(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */
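
To illustrate how these operations are typically called, here is a minimal usage sketch (not part of the header): a small bitmap of flags where test_and_set_bit() serves as a simple try-claim, relying on the returned old value. The names my_flags, MY_BUSY_BIT, and MY_DIRTY_BIT are hypothetical, invented for this example, and a kernel build environment is assumed.

/* Hypothetical caller -- illustrative only. */
#include <linux/bitops.h>
#include <linux/types.h>

#define MY_BUSY_BIT	0	/* hypothetical flag positions */
#define MY_DIRTY_BIT	70	/* lands in word 1 with 64-bit longs */

static unsigned long my_flags[BITS_TO_LONGS(128)];

static bool my_try_claim(void)
{
	/* test_and_set_bit() returns the old value: 0 means we won. */
	if (test_and_set_bit(MY_BUSY_BIT, my_flags))
		return false;	/* already claimed elsewhere */

	set_bit(MY_DIRTY_BIT, my_flags);	/* atomic OR into word 1 */
	return true;
}

static void my_release(void)
{
	clear_bit(MY_BUSY_BIT, my_flags);	/* atomic AND-NOT */
}

Because the value-returning bitops are fully ordered (see Documentation/atomic_bitops.txt), the successful test_and_set_bit() path is safe to use this way; the early-exit failure path, as noted in the comments above, is the one that forgoes the barrier.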