Author | Tokens | Token Proportion | Commits | Commit Proportion |
---|---|---|---|---|
Peter Zijlstra | 1234 | 35.28% | 6 | 18.75% |
Mark Rutland | 1014 | 28.99% | 11 | 34.38% |
Davidlohr Bueso | 659 | 18.84% | 2 | 6.25% |
Will Deacon | 528 | 15.09% | 3 | 9.38% |
Eric Dumazet | 17 | 0.49% | 1 | 3.12% |
Al Viro | 12 | 0.34% | 1 | 3.12% |
Boqun Feng | 10 | 0.29% | 1 | 3.12% |
Arun Sharma | 7 | 0.20% | 3 | 9.38% |
Frédéric Weisbecker | 6 | 0.17% | 1 | 3.12% |
Shaohua Li | 6 | 0.17% | 1 | 3.12% |
Anton Blanchard | 4 | 0.11% | 1 | 3.12% |
Greg Kroah-Hartman | 1 | 0.03% | 1 | 3.12% |
Total | 3498 | 100.00% | 32 | 100.00% |
/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an architecture overrides __atomic_acquire_fence() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_acquire_fence
#define __atomic_acquire_fence		smp_mb__after_atomic
#endif

#ifndef __atomic_release_fence
#define __atomic_release_fence		smp_mb__before_atomic
#endif

#ifndef __atomic_pre_full_fence
#define __atomic_pre_full_fence		smp_mb__before_atomic
#endif

#ifndef __atomic_post_full_fence
#define __atomic_post_full_fence	smp_mb__after_atomic
#endif

#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__atomic_acquire_fence();					\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	__atomic_release_fence();					\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	__atomic_pre_full_fence();					\
	__ret = op##_relaxed(args);					\
	__atomic_post_full_fence();					\
	__ret;								\
})

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...) \
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...) \
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...) \
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

#ifndef atomic_inc
#define atomic_inc(v)			atomic_add(1, (v))
#endif

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed

#ifndef atomic_inc_return
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
#else /* atomic_inc_return */
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return
#endif /* atomic_inc_return */

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...) \
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...) \
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...) \
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...) \
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

#ifndef atomic_dec
#define atomic_dec(v)			atomic_sub(1, (v))
#endif

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed

#ifndef atomic_dec_return
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
#else /* atomic_dec_return */
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return
#endif /* atomic_dec_return */

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...) \
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...) \
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...) \
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...) \
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...) \
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifndef atomic_andnot
#define atomic_andnot(i, v)		atomic_and(~(int)(i), (v))
#endif

#ifndef atomic_fetch_andnot_relaxed

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(i, v)		atomic_fetch_and(~(int)(i), (v))
#define atomic_fetch_andnot_relaxed(i, v)	atomic_fetch_and_relaxed(~(int)(i), (v))
#define atomic_fetch_andnot_acquire(i, v)	atomic_fetch_and_acquire(~(int)(i), (v))
#define atomic_fetch_andnot_release(i, v)	atomic_fetch_and_release(~(int)(i), (v))
#else /* atomic_fetch_andnot */
#define atomic_fetch_andnot_relaxed		atomic_fetch_andnot
#define atomic_fetch_andnot_acquire		atomic_fetch_andnot
#define atomic_fetch_andnot_release		atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...) \
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...) \
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...) \
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...) \
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...) \
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif

#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...) \
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

#ifndef atomic64_inc
#define atomic64_inc(v)			atomic64_add(1, (v))
#endif

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed

#ifndef atomic64_inc_return
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
#else /* atomic64_inc_return */
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return
#endif /* atomic64_inc_return */

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_dec
#define atomic64_dec(v)			atomic64_sub(1, (v))
#endif

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed

#ifndef atomic64_dec_return
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
#else /* atomic64_dec_return */
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return
#endif /* atomic64_dec_return */

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...) \
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...) \
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...) \
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...) \
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifndef atomic64_andnot
#define atomic64_andnot(i, v)		atomic64_and(~(long long)(i), (v))
#endif

#ifndef atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(i, v)		atomic64_fetch_and(~(long long)(i), (v))
#define atomic64_fetch_andnot_relaxed(i, v)	atomic64_fetch_and_relaxed(~(long long)(i), (v))
#define atomic64_fetch_andnot_acquire(i, v)	atomic64_fetch_and_acquire(~(long long)(i), (v))
#define atomic64_fetch_andnot_release(i, v)	atomic64_fetch_and_release(~(long long)(i), (v))
#else /* atomic64_fetch_andnot */
#define atomic64_fetch_andnot_relaxed		atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire		atomic64_fetch_andnot
#define atomic64_fetch_andnot_release		atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...) \
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif

#ifndef atomic64_inc_unless_negative
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif

#ifndef atomic64_dec_unless_positive
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic64 variable, v, was not decremented.
 */
#ifndef atomic64_dec_if_positive
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */
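
For orientation, here is a short usage sketch (not part of the header above): a hypothetical reference-counted object built only from operations this file defines. The names struct foo, foo_get(), foo_put() and foo_inc_saturate() are illustrative inventions; the sketch assumes it is compiled inside the kernel, where kfree() and INT_MAX are available.

/* Hypothetical usage sketch; not part of include/linux/atomic.h. */
#include <linux/atomic.h>
#include <linux/kernel.h>	/* INT_MAX */
#include <linux/slab.h>		/* kfree() */

struct foo {
	atomic_t refcount;
};

/* Take a reference unless the object is already dead (count == 0). */
static bool foo_get(struct foo *f)
{
	/* atomic_inc_not_zero() expands to atomic_add_unless(v, 1, 0). */
	return atomic_inc_not_zero(&f->refcount);
}

/* Drop a reference; the final put frees the object. */
static void foo_put(struct foo *f)
{
	/*
	 * atomic_dec_and_test() is fully ordered, so all prior accesses
	 * to *f on any CPU happen before the free performed by whichever
	 * CPU drops the last reference.
	 */
	if (atomic_dec_and_test(&f->refcount))
		kfree(f);
}

/* Saturating increment, using the same try_cmpxchg() loop idiom the
 * header itself uses for atomic_fetch_add_unless() and friends. */
static void foo_inc_saturate(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (c == INT_MAX)
			return;
		/* On failure, atomic_try_cmpxchg() reloads c and we retry. */
	} while (!atomic_try_cmpxchg(v, &c, c + 1));
}

Where a caller needs weaker ordering, the generated variants slot in directly: for example, atomic_try_cmpxchg_relaxed() in foo_inc_saturate() would avoid the full fence on architectures whose relaxed cmpxchg is not already fully ordered.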