Contributors: 11
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|---|---|---|---|---|
| Linus Torvalds (pre-git) | 116 | 43.77% | 8 | 40.00% |
| David S. Miller | 52 | 19.62% | 1 | 5.00% |
| Anna-Maria Gleixner | 32 | 12.08% | 1 | 5.00% |
| Nicholas Piggin | 25 | 9.43% | 1 | 5.00% |
| James Clark | 16 | 6.04% | 1 | 5.00% |
| Linus Torvalds | 15 | 5.66% | 3 | 15.00% |
| Thomas Gleixner | 4 | 1.51% | 1 | 5.00% |
| Ingo Molnar | 2 | 0.75% | 1 | 5.00% |
| Greg Kroah-Hartman | 1 | 0.38% | 1 | 5.00% |
| Arun Sharma | 1 | 0.38% | 1 | 5.00% |
| Paul Gortmaker | 1 | 0.38% | 1 | 5.00% |
| Total | 265 | | 20 | |
// SPDX-License-Identifier: GPL-2.0
#include <linux/export.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

/*
 * This is an implementation of the notion of "decrement a
 * reference count, and return locked if it decremented to zero".
 *
 * NOTE NOTE NOTE! This is _not_ equivalent to
 *
 *	if (atomic_dec_and_test(&atomic)) {
 *		spin_lock(&lock);
 *		return 1;
 *	}
 *	return 0;
 *
 * because the spin-lock and the decrement must be
 * "atomic".
 */
int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
{
	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
	if (atomic_add_unless(atomic, -1, 1))
		return 0;

	/* Otherwise do it the slow way */
	spin_lock(lock);
	if (atomic_dec_and_test(atomic))
		return 1;
	spin_unlock(lock);
	return 0;
}
EXPORT_SYMBOL(_atomic_dec_and_lock);
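
For context, here is a minimal usage sketch. It is not part of this file: struct foo, its fields, and foo_put() are hypothetical. Callers normally go through the atomic_dec_and_lock() wrapper macro from <linux/spinlock.h> rather than calling _atomic_dec_and_lock() directly:

/* Hypothetical refcounted object kept on a lock-protected list. */
struct foo {
	atomic_t refcount;
	spinlock_t list_lock;
	struct list_head node;
};

/* Drop a reference; tear the object down on the final put. */
static void foo_put(struct foo *f)
{
	if (atomic_dec_and_lock(&f->refcount, &f->list_lock)) {
		/* Count hit zero; list_lock is held here. */
		list_del(&f->node);
		spin_unlock(&f->list_lock);
		kfree(f);
	}
}

Because the decrement-to-zero and the lock acquisition happen atomically with respect to each other, no other CPU can take a new reference to the object between the count reaching zero and the list removal.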

int _atomic_dec_and_lock_irqsave(atomic_t *atomic, spinlock_t *lock,
				 unsigned long *flags)
{
	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
	if (atomic_add_unless(atomic, -1, 1))
		return 0;

	/* Otherwise do it the slow way */
	spin_lock_irqsave(lock, *flags);
	if (atomic_dec_and_test(atomic))
		return 1;
	spin_unlock_irqrestore(lock, *flags);
	return 0;
}
EXPORT_SYMBOL(_atomic_dec_and_lock_irqsave);
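
A corresponding sketch for the irqsave variant, again using the hypothetical struct foo above. Note that the atomic_dec_and_lock_irqsave() wrapper macro takes the flags variable by name and passes its address down to this function:

/* Hypothetical example: the same lock is also taken from interrupt context. */
static void foo_put_irq(struct foo *f)
{
	unsigned long flags;

	if (atomic_dec_and_lock_irqsave(&f->refcount, &f->list_lock, flags)) {
		list_del(&f->node);
		spin_unlock_irqrestore(&f->list_lock, flags);
		kfree(f);
	}
}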

int _atomic_dec_and_raw_lock(atomic_t *atomic, raw_spinlock_t *lock)
{
	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
	if (atomic_add_unless(atomic, -1, 1))
		return 0;

	/* Otherwise do it the slow way */
	raw_spin_lock(lock);
	if (atomic_dec_and_test(atomic))
		return 1;
	raw_spin_unlock(lock);
	return 0;
}
EXPORT_SYMBOL(_atomic_dec_and_raw_lock);

int _atomic_dec_and_raw_lock_irqsave(atomic_t *atomic, raw_spinlock_t *lock,
				     unsigned long *flags)
{
	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
	if (atomic_add_unless(atomic, -1, 1))
		return 0;

	/* Otherwise do it the slow way */
	raw_spin_lock_irqsave(lock, *flags);
	if (atomic_dec_and_test(atomic))
		return 1;
	raw_spin_unlock_irqrestore(lock, *flags);
	return 0;
}
EXPORT_SYMBOL(_atomic_dec_and_raw_lock_irqsave);
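
Two design points are worth noting. First, the atomic_add_unless(atomic, -1, 1) fast path decrements only while the counter is above 1, so the common case of dropping a non-final reference never touches the lock at all; the lock is acquired only when the count may be about to hit zero. Second, the _raw_ variants exist for callers whose lock is a raw_spinlock_t, which (unlike spinlock_t) remains a true spinning lock on PREEMPT_RT kernels.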