Release 4.7: include/linux/bitmap.h
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H
#ifndef __ASSEMBLY__
#include <linux/types.h>
#include <linux/bitops.h>
#include <linux/string.h>
#include <linux/kernel.h>
/*
* bitmaps provide bit arrays that consume one or more unsigned
* longs. The bitmap interface and available operations are listed
* here, in bitmap.h
*
* Function implementations generic to all architectures are in
* lib/bitmap.c. Function implementations that are architecture-
* specific are in various include/asm-<arch>/bitops.h headers
* and other arch/<arch> specific files.
*
* See lib/bitmap.c for more details.
*/
/*
* The available bitmap operations and their rough meaning in the
* case that the bitmap is a single unsigned long are thus:
*
* Note that nbits should always be a compile-time evaluable constant.
* Otherwise many inlines will generate horrible code.
*
* bitmap_zero(dst, nbits) *dst = 0UL
* bitmap_fill(dst, nbits) *dst = ~0UL
* bitmap_copy(dst, src, nbits) *dst = *src
* bitmap_and(dst, src1, src2, nbits) *dst = *src1 & *src2
* bitmap_or(dst, src1, src2, nbits) *dst = *src1 | *src2
* bitmap_xor(dst, src1, src2, nbits) *dst = *src1 ^ *src2
* bitmap_andnot(dst, src1, src2, nbits) *dst = *src1 & ~(*src2)
* bitmap_complement(dst, src, nbits) *dst = ~(*src)
* bitmap_equal(src1, src2, nbits) Are *src1 and *src2 equal?
* bitmap_intersects(src1, src2, nbits) Do *src1 and *src2 overlap?
* bitmap_subset(src1, src2, nbits) Is *src1 a subset of *src2?
* bitmap_empty(src, nbits) Are all bits zero in *src?
* bitmap_full(src, nbits) Are all bits set in *src?
* bitmap_weight(src, nbits) Hamming Weight: number set bits
* bitmap_set(dst, pos, nbits) Set specified bit area
* bitmap_clear(dst, pos, nbits) Clear specified bit area
* bitmap_find_next_zero_area(buf, len, pos, n, mask) Find bit free area
* bitmap_find_next_zero_area_off(buf, len, pos, n, mask) as above
* bitmap_shift_right(dst, src, n, nbits) *dst = *src >> n
* bitmap_shift_left(dst, src, n, nbits) *dst = *src << n
* bitmap_remap(dst, src, old, new, nbits) *dst = map(old, new)(src)
* bitmap_bitremap(oldbit, old, new, nbits) newbit = map(old, new)(oldbit)
* bitmap_onto(dst, orig, relmap, nbits) *dst = orig relative to relmap
* bitmap_fold(dst, orig, sz, nbits) dst bits = orig bits mod sz
* bitmap_parse(buf, buflen, dst, nbits) Parse bitmap dst from kernel buf
* bitmap_parse_user(ubuf, ulen, dst, nbits) Parse bitmap dst from user buf
* bitmap_parselist(buf, dst, nbits) Parse bitmap dst from kernel buf
* bitmap_parselist_user(buf, dst, nbits) Parse bitmap dst from user buf
* bitmap_find_free_region(bitmap, bits, order) Find and allocate bit region
* bitmap_release_region(bitmap, pos, order) Free specified bit region
* bitmap_allocate_region(bitmap, pos, order) Allocate specified bit region
* bitmap_from_u32array(dst, nbits, buf, nwords) *dst = *buf (nwords 32b words)
* bitmap_to_u32array(buf, nwords, src, nbits) *buf = *src (nwords 32b words)
*/
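For orientation, here is a minimal sketch of how a few of these operations combine. It assumes a kernel context where this header is available; the 128-bit size, the variable names and the function name are invented for the example.

/* Example (not part of the header). */
static void bitmap_api_example(void)
{
        unsigned long pending[BITS_TO_LONGS(128)];
        unsigned long enabled[BITS_TO_LONGS(128)];
        unsigned long ready[BITS_TO_LONGS(128)];

        bitmap_zero(pending, 128);      /* clear every bit */
        bitmap_fill(enabled, 128);      /* set every bit   */
        bitmap_set(pending, 10, 4);     /* set bits 10..13 */

        /* ready = pending & enabled; bitmap_and() returns non-zero when
         * the result still has at least one bit set. */
        if (bitmap_and(ready, pending, enabled, 128))
                pr_info("%d bits ready\n", bitmap_weight(ready, 128));
}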
/*
* Also the following operations in asm/bitops.h apply to bitmaps.
*
* set_bit(bit, addr) *addr |= bit
* clear_bit(bit, addr) *addr &= ~bit
* change_bit(bit, addr) *addr ^= bit
* test_bit(bit, addr) Is bit set in *addr?
* test_and_set_bit(bit, addr) Set bit and return old value
* test_and_clear_bit(bit, addr) Clear bit and return old value
* test_and_change_bit(bit, addr) Change bit and return old value
* find_first_zero_bit(addr, nbits) Position first zero bit in *addr
* find_first_bit(addr, nbits) Position first set bit in *addr
* find_next_zero_bit(addr, nbits, bit) Position next zero bit in *addr >= bit
* find_next_bit(addr, nbits, bit) Position next set bit in *addr >= bit
*/
/*
* The DECLARE_BITMAP(name,bits) macro, in linux/types.h, can be used
* to declare an array named 'name' of just enough unsigned longs to
* contain all bit positions from 0 to 'bits' - 1.
*/
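The per-bit operations listed above pair naturally with DECLARE_BITMAP(). A minimal sketch (kernel context assumed; the 300-bit size and the names are invented for the example):

/* Example (not part of the header): BITS_TO_LONGS(300) unsigned longs are
 * reserved, i.e. 5 words on a 64-bit build or 10 words on a 32-bit build. */
static DECLARE_BITMAP(irq_pending, 300);

static void bitops_example(void)
{
        unsigned long bit;

        set_bit(42, irq_pending);               /* atomically mark bit 42 */
        if (test_and_clear_bit(42, irq_pending))
                pr_info("bit 42 was set\n");    /* and is now clear again */

        /* Walk every set bit; find_*_bit() return >= 300 when none remain. */
        for (bit = find_first_bit(irq_pending, 300);
             bit < 300;
             bit = find_next_bit(irq_pending, 300, bit + 1))
                pr_info("bit %lu is set\n", bit);
}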
/*
* lib/bitmap.c provides these functions:
*/
extern int __bitmap_empty(const unsigned long *bitmap, unsigned int nbits);
extern int __bitmap_full(const unsigned long *bitmap, unsigned int nbits);
extern int __bitmap_equal(const unsigned long *bitmap1,
                          const unsigned long *bitmap2, unsigned int nbits);
extern void __bitmap_complement(unsigned long *dst, const unsigned long *src,
                                unsigned int nbits);
extern void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                                 unsigned int shift, unsigned int nbits);
extern void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                                unsigned int shift, unsigned int nbits);
extern int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                        const unsigned long *bitmap2, unsigned int nbits);
extern void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                        const unsigned long *bitmap2, unsigned int nbits);
extern void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                         const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                           const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_intersects(const unsigned long *bitmap1,
                               const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_subset(const unsigned long *bitmap1,
                           const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits);
extern void bitmap_set(unsigned long *map, unsigned int start, int len);
extern void bitmap_clear(unsigned long *map, unsigned int start, int len);
extern unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
                                                    unsigned long size,
                                                    unsigned long start,
                                                    unsigned int nr,
                                                    unsigned long align_mask,
                                                    unsigned long align_offset);
/**
* bitmap_find_next_zero_area - find a contiguous aligned zero area
* @map: The address to base the search on
* @size: The bitmap size in bits
* @start: The bitnumber to start searching at
* @nr: The number of zeroed bits we're looking for
* @align_mask: Alignment mask for zero area
*
* The @align_mask should be one less than a power of 2; the effect is that
* the bit offsets of all zero areas this function finds are multiples of that
* power of 2. An @align_mask of 0 means no alignment is required.
*/
static inline unsigned long
bitmap_find_next_zero_area(unsigned long *map,
                           unsigned long size,
                           unsigned long start,
                           unsigned int nr,
                           unsigned long align_mask)
{
        return bitmap_find_next_zero_area_off(map, size, start, nr,
                                              align_mask, 0);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| michal nazarewicz | 45 | 100.00% | 1 | 100.00% |
| Total | 45 | 100.00% | 1 | 100.00% |
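A typical calling pattern, sketched below with invented names: search for an aligned run of zero bits, treat an out-of-range result as failure, then mark the area as used.

/* Example (not part of the header). */
static long claim_aligned_region(unsigned long *map, unsigned long nbits)
{
        unsigned long start;

        /* 8 contiguous zero bits, starting on a multiple of 4
         * (align_mask = 4 - 1 = 3), searched from bit 0. */
        start = bitmap_find_next_zero_area(map, nbits, 0, 8, 3);
        if (start >= nbits)
                return -ENOMEM;         /* no suitably aligned gap found */

        bitmap_set(map, start, 8);      /* claim the area */
        return start;
}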
extern int __bitmap_parse(const char *buf, unsigned int buflen, int is_user,
                          unsigned long *dst, int nbits);
extern int bitmap_parse_user(const char __user *ubuf, unsigned int ulen,
                             unsigned long *dst, int nbits);
extern int bitmap_parselist(const char *buf, unsigned long *maskp,
                            int nmaskbits);
extern int bitmap_parselist_user(const char __user *ubuf, unsigned int ulen,
                                 unsigned long *dst, int nbits);
extern void bitmap_remap(unsigned long *dst, const unsigned long *src,
                         const unsigned long *old, const unsigned long *new, unsigned int nbits);
extern int bitmap_bitremap(int oldbit,
                           const unsigned long *old, const unsigned long *new, int bits);
extern void bitmap_onto(unsigned long *dst, const unsigned long *orig,
                        const unsigned long *relmap, unsigned int bits);
extern void bitmap_fold(unsigned long *dst, const unsigned long *orig,
                        unsigned int sz, unsigned int nbits);
extern int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order);
extern void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order);
extern int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order);
extern unsigned int bitmap_from_u32array(unsigned long *bitmap,
                                         unsigned int nbits,
                                         const u32 *buf,
                                         unsigned int nwords);
extern unsigned int bitmap_to_u32array(u32 *buf,
                                       unsigned int nwords,
                                       const unsigned long *bitmap,
                                       unsigned int nbits);
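bitmap_from_u32array() and bitmap_to_u32array() exist because a bitmap is stored in unsigned longs, so on 64-bit kernels a u32 array cannot simply be memcpy'd in either direction. A minimal sketch (the 64-bit flag count and the names are invented for the example):

/* Example (not part of the header): move 64 flag bits between a u32-based
 * representation (e.g. an ioctl payload) and a native bitmap. */
static DECLARE_BITMAP(flags, 64);
static u32 words[2];                            /* 2 x 32 bits = 64 bits */

static void u32array_example(void)
{
        bitmap_from_u32array(flags, 64, words, 2);      /* words -> flags */
        bitmap_to_u32array(words, 2, flags, 64);        /* flags -> words */
}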
#ifdef __BIG_ENDIAN
extern void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits);
#else
#define bitmap_copy_le bitmap_copy
#endif
extern unsigned int bitmap_ord_to_pos(const unsigned long *bitmap, unsigned int ord, unsigned int nbits);
extern int bitmap_print_to_pagebuf(bool list, char *buf,
                                   const unsigned long *maskp, int nmaskbits);
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))
#define small_const_nbits(nbits) \
        (__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG)
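To make the two word-mask macros concrete, a few values on a 64-bit build (BITS_PER_LONG == 64); these follow directly from the definitions above:

/*
 * BITMAP_FIRST_WORD_MASK(start) keeps the bits at and above 'start' within
 * its word; BITMAP_LAST_WORD_MASK(nbits) keeps the low bits that are still
 * part of an nbits-sized bitmap's final word.
 *
 *   BITMAP_FIRST_WORD_MASK(0)  == ~0UL                  (all 64 bits)
 *   BITMAP_FIRST_WORD_MASK(4)  == 0xfffffffffffffff0UL
 *   BITMAP_LAST_WORD_MASK(64)  == ~0UL                  (last word fully used)
 *   BITMAP_LAST_WORD_MASK(70)  == 0x000000000000003fUL  (70 % 64 = 6 bits)
 *
 * small_const_nbits(nbits) is true only when nbits is a compile-time
 * constant that fits in a single word, which is what lets the inlines
 * below collapse to one single-word operation.
 */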
static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = 0UL;
        else {
                unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
                memset(dst, 0, len);
        }
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 50 | 90.91% | 1 | 33.33% |
| rusty russell | 3 | 5.45% | 1 | 33.33% |
| rasmus villemoes | 2 | 3.64% | 1 | 33.33% |
| Total | 55 | 100.00% | 3 | 100.00% |
static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
        unsigned int nlongs = BITS_TO_LONGS(nbits);
        if (!small_const_nbits(nbits)) {
                unsigned int len = (nlongs - 1) * sizeof(unsigned long);
                memset(dst, 0xff, len);
        }
        dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 63 | 87.50% | 1 | 33.33% |
| rusty russell | 5 | 6.94% | 1 | 33.33% |
| rasmus villemoes | 4 | 5.56% | 1 | 33.33% |
| Total | 72 | 100.00% | 3 | 100.00% |
static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
                        unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src;
        else {
                unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
                memcpy(dst, src, len);
        }
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 57 | 91.94% | 1 | 33.33% |
| rusty russell | 3 | 4.84% | 1 | 33.33% |
| rasmus villemoes | 2 | 3.23% | 1 | 33.33% |
| Total | 62 | 100.00% | 3 | 100.00% |
static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        return __bitmap_and(dst, src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 48 | 72.73% | 1 | 20.00% |
| linus torvalds | 9 | 13.64% | 1 | 20.00% |
| rasmus villemoes | 6 | 9.09% | 2 | 40.00% |
| rusty russell | 3 | 4.55% | 1 | 20.00% |
| Total | 66 | 100.00% | 5 | 100.00% |
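Note the return-value convention: like bitmap_andnot() below, bitmap_and() reports whether the destination ended up non-empty, so a call can double as a mask-and-test. A tiny sketch with invented names (NR_ACTIVE_BITS is hypothetical):

/* Example (not part of the header): prune 'active' to the bits that are
 * also set in 'allowed' (dst may alias src1 here), and notice when it
 * becomes empty. */
if (!bitmap_and(active, active, allowed, NR_ACTIVE_BITS))
        pr_warn("no active entries remain\n");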
static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src1 | *src2;
        else
                __bitmap_or(dst, src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 52 | 92.86% | 1 | 33.33% |
| rusty russell | 3 | 5.36% | 1 | 33.33% |
| rasmus villemoes | 1 | 1.79% | 1 | 33.33% |
| Total | 56 | 100.00% | 3 | 100.00% |
static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src1 ^ *src2;
        else
                __bitmap_xor(dst, src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 52 | 92.86% | 1 | 33.33% |
| rusty russell | 3 | 5.36% | 1 | 33.33% |
| rasmus villemoes | 1 | 1.79% | 1 | 33.33% |
| Total | 56 | 100.00% | 3 | 100.00% |
static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        return __bitmap_andnot(dst, src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 51 | 73.91% | 1 | 20.00% |
| linus torvalds | 9 | 13.04% | 1 | 20.00% |
| rasmus villemoes | 6 | 8.70% | 2 | 40.00% |
| rusty russell | 3 | 4.35% | 1 | 20.00% |
| Total | 69 | 100.00% | 5 | 100.00% |
static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
                        unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = ~(*src);
        else
                __bitmap_complement(dst, src, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 44 | 91.67% | 1 | 33.33% |
| rusty russell | 3 | 6.25% | 1 | 33.33% |
| rasmus villemoes | 1 | 2.08% | 1 | 33.33% |
| Total | 48 | 100.00% | 3 | 100.00% |
static inline int bitmap_equal(const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return ! ((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
        else
                return __bitmap_equal(src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 54 | 93.10% | 1 | 33.33% |
| rusty russell | 3 | 5.17% | 1 | 33.33% |
| rasmus villemoes | 1 | 1.72% | 1 | 33.33% |
| Total | 58 | 100.00% | 3 | 100.00% |
static inline int bitmap_intersects(const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        else
                return __bitmap_intersects(src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 55 | 93.22% | 1 | 33.33% |
| rusty russell | 3 | 5.08% | 1 | 33.33% |
| rasmus villemoes | 1 | 1.69% | 1 | 33.33% |
| Total | 59 | 100.00% | 3 | 100.00% |
static inline int bitmap_subset(const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return ! ((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
        else
                return __bitmap_subset(src1, src2, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 57 | 93.44% | 1 | 33.33% |
| rusty russell | 3 | 4.92% | 1 | 33.33% |
| rasmus villemoes | 1 | 1.64% | 1 | 33.33% |
| Total | 61 | 100.00% | 3 | 100.00% |
static inline int bitmap_empty(const unsigned long *src, unsigned nbits)
{
        if (small_const_nbits(nbits))
                return ! (*src & BITMAP_LAST_WORD_MASK(nbits));
        return find_first_bit(src, nbits) == nbits;
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 38 | 84.44% | 1 | 25.00% |
| yury norov | 3 | 6.67% | 1 | 25.00% |
| rusty russell | 3 | 6.67% | 1 | 25.00% |
| rasmus villemoes | 1 | 2.22% | 1 | 25.00% |
| Total | 45 | 100.00% | 4 | 100.00% |
static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));
        return find_first_zero_bit(src, nbits) == nbits;
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 42 | 85.71% | 1 | 25.00% |
| rusty russell | 3 | 6.12% | 1 | 25.00% |
| yury norov | 3 | 6.12% | 1 | 25.00% |
| rasmus villemoes | 1 | 2.04% | 1 | 25.00% |
| Total | 49 | 100.00% | 4 | 100.00% |
static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
        return __bitmap_weight(src, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 23 | 52.27% | 1 | 20.00% |
| andi kleen | 16 | 36.36% | 1 | 20.00% |
| rusty russell | 3 | 6.82% | 1 | 20.00% |
| rasmus villemoes | 1 | 2.27% | 1 | 20.00% |
| denys vlasenko | 1 | 2.27% | 1 | 20.00% |
| Total | 44 | 100.00% | 5 | 100.00% |
static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                        unsigned int shift, int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
        else
                __bitmap_shift_right(dst, src, shift, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 45 | 76.27% | 1 | 25.00% |
| rasmus villemoes | 11 | 18.64% | 2 | 50.00% |
| rusty russell | 3 | 5.08% | 1 | 25.00% |
| Total | 59 | 100.00% | 4 | 100.00% |
static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                        unsigned int shift, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
        else
                __bitmap_shift_left(dst, src, shift, nbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 52 | 86.67% | 1 | 33.33% |
| rasmus villemoes | 5 | 8.33% | 1 | 33.33% |
| rusty russell | 3 | 5.00% | 1 | 33.33% |
| Total | 60 | 100.00% | 3 | 100.00% |
static inline int bitmap_parse(const char *buf, unsigned int buflen,
                        unsigned long *maskp, int nmaskbits)
{
        return __bitmap_parse(buf, buflen, 0, maskp, nmaskbits);
}

Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| reinette chatre | 38 | 100.00% | 1 | 100.00% |
| Total | 38 | 100.00% | 1 | 100.00% |
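A short sketch of the two kernel-buffer parsing flavours; the input strings follow the usual cpumask-style formats, the 64-bit size is an arbitrary choice for the example, and error handling is abbreviated:

/* Example (not part of the header). */
static DECLARE_BITMAP(mask, 64);

static int parse_example(void)
{
        int err;

        /* Hex word format (as in /proc/irq/<n>/smp_affinity): 0x10f sets
         * bits 0-3 and 8. */
        err = bitmap_parse("10f", 3, mask, 64);

        /* List format (as in smp_affinity_list): also sets bits 0-3 and 8. */
        if (!err)
                err = bitmap_parselist("0-3,8", mask, 64);
        return err;
}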
#endif /* __ASSEMBLY__ */
#endif /* __LINUX_BITMAP_H */
Overall Contributors
| Person | Tokens | Prop | Commits | CommitProp |
|---|---|---|---|---|
| andrew morton | 1089 | 61.63% | 5 | 10.42% |
| paul jackson | 119 | 6.73% | 3 | 6.25% |
| rasmus villemoes | 107 | 6.06% | 24 | 50.00% |
| reinette chatre | 64 | 3.62% | 1 | 2.08% |
| rusty russell | 58 | 3.28% | 1 | 2.08% |
| akinobu mita | 56 | 3.17% | 1 | 2.08% |
| michal nazarewicz | 51 | 2.89% | 1 | 2.08% |
| david decotigny | 49 | 2.77% | 1 | 2.08% |
| james bottomley | 46 | 2.60% | 1 | 2.08% |
| mike travis | 23 | 1.30% | 1 | 2.08% |
| sudeep holla | 21 | 1.19% | 1 | 2.08% |
| linus torvalds | 20 | 1.13% | 1 | 2.08% |
| david vrabel | 17 | 0.96% | 1 | 2.08% |
| andi kleen | 16 | 0.91% | 1 | 2.08% |
| michal hocko | 15 | 0.85% | 1 | 2.08% |
| yury norov | 6 | 0.34% | 1 | 2.08% |
| huang ying | 6 | 0.34% | 1 | 2.08% |
| jiri slaby | 3 | 0.17% | 1 | 2.08% |
| denys vlasenko | 1 | 0.06% | 1 | 2.08% |
| Total | 1767 | 100.00% | 48 | 100.00% |