include/linux/bitmap.h, Linux kernel mirror (for testing), git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git, at d986ba0329dcca102e227995371135c9bbcefb6b (899 lines, 32 kB)

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#ifndef __ASSEMBLY__

#include <linux/align.h>
#include <linux/bitops.h>
#include <linux/cleanup.h>
#include <linux/errno.h>
#include <linux/find.h>
#include <linux/limits.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/bitmap-str.h>

struct device;

/*
 * bitmaps provide bit arrays that consume one or more unsigned
 * longs.  The bitmap interface and available operations are listed
 * here, in bitmap.h
 *
 * Function implementations generic to all architectures are in
 * lib/bitmap.c.  Function implementations that are architecture
 * specific are in various arch/<arch>/include/asm/bitops.h headers
 * and other arch/<arch> specific files.
 *
 * See lib/bitmap.c for more details.
 */

/**
 * DOC: bitmap overview
 *
 * The available bitmap operations and their rough meaning in the
 * case that the bitmap is a single unsigned long are thus:
 *
 * The generated code is more efficient when nbits is known at
 * compile-time and at most BITS_PER_LONG.
 *
 * ::
 *
 *  bitmap_zero(dst, nbits)                      *dst = 0UL
 *  bitmap_fill(dst, nbits)                      *dst = ~0UL
 *  bitmap_copy(dst, src, nbits)                 *dst = *src
 *  bitmap_and(dst, src1, src2, nbits)           *dst = *src1 & *src2
 *  bitmap_or(dst, src1, src2, nbits)            *dst = *src1 | *src2
 *  bitmap_weighted_or(dst, src1, src2, nbits)   *dst = *src1 | *src2. Returns Hamming Weight of dst
 *  bitmap_weighted_xor(dst, src1, src2, nbits)  *dst = *src1 ^ *src2. Returns Hamming Weight of dst
 *  bitmap_xor(dst, src1, src2, nbits)           *dst = *src1 ^ *src2
 *  bitmap_andnot(dst, src1, src2, nbits)        *dst = *src1 & ~(*src2)
 *  bitmap_complement(dst, src, nbits)           *dst = ~(*src)
 *  bitmap_equal(src1, src2, nbits)              Are *src1 and *src2 equal?
 *  bitmap_intersects(src1, src2, nbits)         Do *src1 and *src2 overlap?
 *  bitmap_subset(src1, src2, nbits)             Is *src1 a subset of *src2?
 *  bitmap_empty(src, nbits)                     Are all bits zero in *src?
 *  bitmap_full(src, nbits)                      Are all bits set in *src?
 *  bitmap_weight(src, nbits)                    Hamming Weight: number set bits
 *  bitmap_weight_and(src1, src2, nbits)         Hamming Weight of and'ed bitmap
 *  bitmap_weight_andnot(src1, src2, nbits)      Hamming Weight of andnot'ed bitmap
 *  bitmap_weight_from(src, start, end)          Hamming Weight starting from @start
 *  bitmap_set(dst, pos, nbits)                  Set specified bit area
 *  bitmap_clear(dst, pos, nbits)                Clear specified bit area
 *  bitmap_find_next_zero_area(buf, len, pos, n, mask)  Find bit free area
 *  bitmap_find_next_zero_area_off(buf, len, pos, n, mask, mask_off)  as above
 *  bitmap_shift_right(dst, src, n, nbits)       *dst = *src >> n
 *  bitmap_shift_left(dst, src, n, nbits)        *dst = *src << n
 *  bitmap_cut(dst, src, first, n, nbits)        Cut n bits from first, copy rest
 *  bitmap_replace(dst, old, new, mask, nbits)   *dst = (*old & ~(*mask)) | (*new & *mask)
 *  bitmap_scatter(dst, src, mask, nbits)        *dst = map(dense, sparse)(src)
 *  bitmap_gather(dst, src, mask, nbits)         *dst = map(sparse, dense)(src)
 *  bitmap_remap(dst, src, old, new, nbits)      *dst = map(old, new)(src)
 *  bitmap_bitremap(oldbit, old, new, nbits)     newbit = map(old, new)(oldbit)
 *  bitmap_onto(dst, orig, relmap, nbits)        *dst = orig relative to relmap
 *  bitmap_fold(dst, orig, sz, nbits)            dst bits = orig bits mod sz
 *  bitmap_parse(buf, buflen, dst, nbits)        Parse bitmap dst from kernel buf
 *  bitmap_parse_user(ubuf, ulen, dst, nbits)    Parse bitmap dst from user buf
 *  bitmap_parselist(buf, dst, nbits)            Parse bitmap dst from kernel buf
 *  bitmap_parselist_user(buf, dst, nbits)       Parse bitmap dst from user buf
 *  bitmap_find_free_region(bitmap, bits, order)  Find and allocate bit region
 *  bitmap_release_region(bitmap, pos, order)    Free specified bit region
 *  bitmap_allocate_region(bitmap, pos, order)   Allocate specified bit region
 *  bitmap_from_arr32(dst, buf, nbits)           Copy nbits from u32[] buf to dst
 *  bitmap_from_arr64(dst, buf, nbits)           Copy nbits from u64[] buf to dst
 *  bitmap_to_arr32(buf, src, nbits)             Copy nbits from buf to u32[] dst
 *  bitmap_to_arr64(buf, src, nbits)             Copy nbits from buf to u64[] dst
 *  bitmap_get_value8(map, start)                Get 8bit value from map at start
 *  bitmap_set_value8(map, value, start)         Set 8bit value to map at start
 *  bitmap_read(map, start, nbits)               Read an nbits-sized value from
 *                                               map at start
 *  bitmap_write(map, value, start, nbits)       Write an nbits-sized value to
 *                                               map at start
 *
 * Note, bitmap_zero() and bitmap_fill() operate over the whole region of
 * unsigned longs, that is, bits beyond nbits and up to the unsigned long
 * boundary will be zeroed or filled as well. Consider using bitmap_clear()
 * or bitmap_set() for explicit zeroing or filling, respectively.
 */
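
/*
 * Illustrative usage sketch (editorial addition, not part of the upstream
 * kernel-doc above): the fixed-size operations listed in the overview, on a
 * stack bitmap declared with DECLARE_BITMAP() (see the "declare bitmap" note
 * below). The names ex_a and ex_b are placeholders.
 *
 *	DECLARE_BITMAP(ex_a, 128);
 *	DECLARE_BITMAP(ex_b, 128);
 *
 *	bitmap_zero(ex_a, 128);
 *	bitmap_zero(ex_b, 128);
 *	bitmap_set(ex_a, 0, 16);		// set bits 0..15
 *	bitmap_or(ex_b, ex_b, ex_a, 128);	// ex_b |= ex_a
 *	// now bitmap_weight(ex_b, 128) == 16 and
 *	// bitmap_equal(ex_a, ex_b, 128) is true
 */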

/**
 * DOC: bitmap bitops
 *
 * Also the following operations in asm/bitops.h apply to bitmaps.::
 *
 *  set_bit(bit, addr)                  *addr |= bit
 *  clear_bit(bit, addr)                *addr &= ~bit
 *  change_bit(bit, addr)               *addr ^= bit
 *  test_bit(bit, addr)                 Is bit set in *addr?
 *  test_and_set_bit(bit, addr)         Set bit and return old value
 *  test_and_clear_bit(bit, addr)       Clear bit and return old value
 *  test_and_change_bit(bit, addr)      Change bit and return old value
 *  find_first_zero_bit(addr, nbits)    Position first zero bit in *addr
 *  find_first_bit(addr, nbits)         Position first set bit in *addr
 *  find_next_zero_bit(addr, nbits, bit)
 *                                      Position next zero bit in *addr >= bit
 *  find_next_bit(addr, nbits, bit)     Position next set bit in *addr >= bit
 *  find_next_and_bit(addr1, addr2, nbits, bit)
 *                                      Same as find_next_bit, but in
 *                                      (*addr1 & *addr2)
 *
 */

/**
 * DOC: declare bitmap
 * The DECLARE_BITMAP(name,bits) macro, in linux/types.h, can be used
 * to declare an array named 'name' of just enough unsigned longs to
 * contain all bit positions from 0 to 'bits' - 1.
 */

/*
 * Allocation and deallocation of bitmap.
 * Provided in lib/bitmap.c to avoid circular dependency.
 */
unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags);
unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags);
unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node);
unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node);
void bitmap_free(const unsigned long *bitmap);

DEFINE_FREE(bitmap, unsigned long *, if (_T) bitmap_free(_T))

/* Managed variants of the above. */
unsigned long *devm_bitmap_alloc(struct device *dev,
				 unsigned int nbits, gfp_t flags);
unsigned long *devm_bitmap_zalloc(struct device *dev,
				  unsigned int nbits, gfp_t flags);
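
/*
 * Illustrative sketch (editorial addition, not upstream documentation): a
 * heap-allocated bitmap sized at runtime. bitmap_zalloc() returns a
 * zero-filled bitmap of nbits bits or NULL, and bitmap_free() releases it.
 * The names "nbits" and "map" are placeholders. The DEFINE_FREE(bitmap, ...)
 * above additionally enables the scope-based cleanup form from
 * linux/cleanup.h, i.e. a local declared with __free(bitmap).
 *
 *	unsigned long *map = bitmap_zalloc(nbits, GFP_KERNEL);
 *
 *	if (!map)
 *		return -ENOMEM;
 *	bitmap_set(map, 0, 8);
 *	bitmap_free(map);
 */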

/*
 * lib/bitmap.c provides these functions:
 */

bool __bitmap_equal(const unsigned long *bitmap1,
		    const unsigned long *bitmap2, unsigned int nbits);
bool __pure __bitmap_or_equal(const unsigned long *src1,
			      const unsigned long *src2,
			      const unsigned long *src3,
			      unsigned int nbits);
void __bitmap_complement(unsigned long *dst, const unsigned long *src,
			 unsigned int nbits);
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			  unsigned int shift, unsigned int nbits);
void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
			 unsigned int shift, unsigned int nbits);
void bitmap_cut(unsigned long *dst, const unsigned long *src,
		unsigned int first, unsigned int cut, unsigned int nbits);
bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
		  const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
		 const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weighted_or(unsigned long *dst, const unsigned long *bitmap1,
				  const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weighted_xor(unsigned long *dst, const unsigned long *bitmap1,
				   const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
		  const unsigned long *bitmap2, unsigned int nbits);
bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_replace(unsigned long *dst,
		      const unsigned long *old, const unsigned long *new,
		      const unsigned long *mask, unsigned int nbits);
bool __bitmap_intersects(const unsigned long *bitmap1,
			 const unsigned long *bitmap2, unsigned int nbits);
bool __bitmap_subset(const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits);
unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
				 const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weight_andnot(const unsigned long *bitmap1,
				    const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_set(unsigned long *map, unsigned int start, int len);
void __bitmap_clear(unsigned long *map, unsigned int start, int len);

unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					     unsigned long size,
					     unsigned long start,
					     unsigned int nr,
					     unsigned long align_mask,
					     unsigned long align_offset);

/**
 * bitmap_find_next_zero_area - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of all zero areas this function finds is multiples of that
 * power of 2. A @align_mask of 0 means no alignment is required.
 */
static __always_inline
unsigned long bitmap_find_next_zero_area(unsigned long *map,
					 unsigned long size,
					 unsigned long start,
					 unsigned int nr,
					 unsigned long align_mask)
{
	return bitmap_find_next_zero_area_off(map, size, start, nr,
					      align_mask, 0);
}
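
/*
 * Illustrative sketch (editorial addition, not upstream documentation):
 * finding a naturally aligned run of 4 clear bits. An @align_mask of 3 (one
 * less than 4) constrains the start of the returned area to a multiple of 4.
 * Callers conventionally treat a result at or past the bitmap size as "no
 * area found", since no error code is returned. "map" and "pos" are
 * placeholder names.
 *
 *	pos = bitmap_find_next_zero_area(map, 128, 0, 4, 3);
 *	if (pos >= 128)
 *		return -ENOSPC;		// nothing suitable left
 *	bitmap_set(map, pos, 4);	// claim the area
 */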

void bitmap_remap(unsigned long *dst, const unsigned long *src,
		const unsigned long *old, const unsigned long *new, unsigned int nbits);
int bitmap_bitremap(int oldbit,
		const unsigned long *old, const unsigned long *new, int bits);
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
		const unsigned long *relmap, unsigned int bits);
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
		unsigned int sz, unsigned int nbits);

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))

#define bitmap_size(nbits)	(ALIGN(nbits, BITS_PER_LONG) / BITS_PER_BYTE)

static __always_inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
	unsigned int len = bitmap_size(nbits);

	if (small_const_nbits(nbits))
		*dst = 0;
	else
		memset(dst, 0, len);
}

static __always_inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
	unsigned int len = bitmap_size(nbits);

	if (small_const_nbits(nbits))
		*dst = ~0UL;
	else
		memset(dst, 0xff, len);
}

static __always_inline
void bitmap_copy(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	unsigned int len = bitmap_size(nbits);

	if (small_const_nbits(nbits))
		*dst = *src;
	else
		memcpy(dst, src, len);
}

/*
 * Copy bitmap and clear tail bits in last word.
 */
static __always_inline
void bitmap_copy_clear_tail(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	bitmap_copy(dst, src, nbits);
	if (nbits % BITS_PER_LONG)
		dst[nbits / BITS_PER_LONG] &= BITMAP_LAST_WORD_MASK(nbits);
}

static inline void bitmap_copy_and_extend(unsigned long *to,
					  const unsigned long *from,
					  unsigned int count, unsigned int size)
{
	unsigned int copy = BITS_TO_LONGS(count);

	memcpy(to, from, copy * sizeof(long));
	if (count % BITS_PER_LONG)
		to[copy - 1] &= BITMAP_LAST_WORD_MASK(count);
	memset(to + copy, 0, bitmap_size(size) - copy * sizeof(long));
}

/*
 * On 32-bit systems bitmaps are represented as u32 arrays internally. On LE64
 * machines the order of hi and lo parts of numbers matches the bitmap structure.
 * In both cases conversion is not needed when copying data from/to arrays of
 * u32. But in the LE64 case, the typecast in bitmap_copy_clear_tail() may lead
 * to out-of-bound access. To avoid that, both LE and BE variants of 64-bit
 * architectures do not use bitmap_copy_clear_tail().
 */
#if BITS_PER_LONG == 64
void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf,
		       unsigned int nbits);
void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap,
		     unsigned int nbits);
#else
#define bitmap_from_arr32(bitmap, buf, nbits)			\
	bitmap_copy_clear_tail((unsigned long *) (bitmap),	\
			       (const unsigned long *) (buf), (nbits))
#define bitmap_to_arr32(buf, bitmap, nbits)			\
	bitmap_copy_clear_tail((unsigned long *) (buf),		\
			       (const unsigned long *) (bitmap), (nbits))
#endif
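
/*
 * Illustrative sketch (editorial addition, not upstream documentation):
 * exchanging a bitmap with a fixed-layout u32 array, e.g. for a userspace ABI
 * or hardware registers. The helpers hide the 32/64-bit word-size difference;
 * on 32-bit kernels they reduce to a copy plus tail clearing. "regs" and
 * "map" are placeholder names.
 *
 *	u32 regs[2];
 *	DECLARE_BITMAP(map, 64);
 *
 *	bitmap_from_arr32(map, regs, 64);	// u32[2] -> bitmap
 *	bitmap_set(map, 40, 3);
 *	bitmap_to_arr32(regs, map, 64);		// bitmap -> u32[2]
 */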

/*
 * On 64-bit systems bitmaps are represented as u64 arrays internally. So,
 * the conversion is not needed when copying data from/to arrays of u64.
 */
#if BITS_PER_LONG == 32
void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits);
void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits);
#else
#define bitmap_from_arr64(bitmap, buf, nbits)			\
	bitmap_copy_clear_tail((unsigned long *)(bitmap), (const unsigned long *)(buf), (nbits))
#define bitmap_to_arr64(buf, bitmap, nbits)			\
	bitmap_copy_clear_tail((unsigned long *)(buf), (const unsigned long *)(bitmap), (nbits))
#endif

static __always_inline
bool bitmap_and(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	return __bitmap_and(dst, src1, src2, nbits);
}

static __always_inline
void bitmap_or(unsigned long *dst, const unsigned long *src1,
	       const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = *src1 | *src2;
	else
		__bitmap_or(dst, src1, src2, nbits);
}

static __always_inline
unsigned int bitmap_weighted_or(unsigned long *dst, const unsigned long *src1,
				const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = *src1 | *src2;
		return hweight_long(*dst & BITMAP_LAST_WORD_MASK(nbits));
	} else {
		return __bitmap_weighted_or(dst, src1, src2, nbits);
	}
}

static __always_inline
unsigned int bitmap_weighted_xor(unsigned long *dst, const unsigned long *src1,
				 const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = *src1 ^ *src2;
		return hweight_long(*dst & BITMAP_LAST_WORD_MASK(nbits));
	} else {
		return __bitmap_weighted_xor(dst, src1, src2, nbits);
	}
}

static __always_inline
void bitmap_xor(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = *src1 ^ *src2;
	else
		__bitmap_xor(dst, src1, src2, nbits);
}

static __always_inline
bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
		   const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	return __bitmap_andnot(dst, src1, src2, nbits);
}

static __always_inline
void bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = ~(*src);
	else
		__bitmap_complement(dst, src, nbits);
}

#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

static __always_inline
bool bitmap_equal(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
	if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
	    IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		return !memcmp(src1, src2, nbits / 8);
	return __bitmap_equal(src1, src2, nbits);
}
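
/*
 * Illustrative sketch (editorial addition, not upstream documentation):
 * bitmap_and() and bitmap_andnot() return true when the destination ends up
 * non-empty within nbits, so the result can double as an "anything left?"
 * test. "pending", "allowed", "masked" and handle() are placeholder names.
 *
 *	DECLARE_BITMAP(masked, 64);
 *	bool any;
 *
 *	any = bitmap_and(masked, pending, allowed, 64);
 *	if (any)
 *		handle(masked);		// at least one pending bit is allowed
 */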

/**
 * bitmap_or_equal - Check whether the or of two bitmaps is equal to a third
 * @src1: Pointer to bitmap 1
 * @src2: Pointer to bitmap 2 will be or'ed with bitmap 1
 * @src3: Pointer to bitmap 3. Compare to the result of *@src1 | *@src2
 * @nbits: number of bits in each of these bitmaps
 *
 * Returns: True if (*@src1 | *@src2) == *@src3, false otherwise
 */
static __always_inline
bool bitmap_or_equal(const unsigned long *src1, const unsigned long *src2,
		     const unsigned long *src3, unsigned int nbits)
{
	if (!small_const_nbits(nbits))
		return __bitmap_or_equal(src1, src2, src3, nbits);

	return !(((*src1 | *src2) ^ *src3) & BITMAP_LAST_WORD_MASK(nbits));
}

static __always_inline
bool bitmap_intersects(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	else
		return __bitmap_intersects(src1, src2, nbits);
}

static __always_inline
bool bitmap_subset(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return ! ((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
	else
		return __bitmap_subset(src1, src2, nbits);
}

static __always_inline
bool bitmap_empty(const unsigned long *src, unsigned nbits)
{
	if (small_const_nbits(nbits))
		return ! (*src & BITMAP_LAST_WORD_MASK(nbits));

	return find_first_bit(src, nbits) == nbits;
}

static __always_inline
bool bitmap_full(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));

	return find_first_zero_bit(src, nbits) == nbits;
}

static __always_inline
unsigned int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight(src, nbits);
}

static __always_inline
unsigned long bitmap_weight_and(const unsigned long *src1,
				const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight_and(src1, src2, nbits);
}

static __always_inline
unsigned long bitmap_weight_andnot(const unsigned long *src1,
				   const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight_andnot(src1, src2, nbits);
}

/**
 * bitmap_weight_from - Hamming weight for a memory region
 * @bitmap: The base address
 * @start: The bitnumber to start weighting from
 * @end: the bitmap size in bits
 *
 * Returns the number of set bits in the region. If @start >= @end,
 * returns @end.
 */
static __always_inline
unsigned long bitmap_weight_from(const unsigned long *bitmap,
				 unsigned int start, unsigned int end)
{
	unsigned long w;

	if (unlikely(start >= end))
		return end;

	if (small_const_nbits(end))
		return hweight_long(*bitmap & GENMASK(end - 1, start));

	bitmap += start / BITS_PER_LONG;
	/* Opencode round_down() to not include math.h */
	end -= start & ~(BITS_PER_LONG - 1);
	start %= BITS_PER_LONG;
	w = bitmap_weight(bitmap, end);
	if (start)
		w -= hweight_long(*bitmap & BITMAP_LAST_WORD_MASK(start));

	return w;
}

static __always_inline
void bitmap_set(unsigned long *map, unsigned int start, unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__set_bit(start, map);
	else if (small_const_nbits(start + nbits))
		*map |= GENMASK(start + nbits - 1, start);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0xff, nbits / 8);
	else
		__bitmap_set(map, start, nbits);
}

static __always_inline
void bitmap_clear(unsigned long *map, unsigned int start, unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__clear_bit(start, map);
	else if (small_const_nbits(start + nbits))
		*map &= ~GENMASK(start + nbits - 1, start);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0, nbits / 8);
	else
		__bitmap_clear(map, start, nbits);
}
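
/*
 * Illustrative sketch (editorial addition, not upstream documentation):
 * bitmap_set() and bitmap_clear() touch only the requested bit range, unlike
 * bitmap_fill() and bitmap_zero() which operate up to the unsigned long
 * boundary (see the overview note above). "map" is a placeholder name.
 *
 *	DECLARE_BITMAP(map, 100);
 *
 *	bitmap_zero(map, 100);
 *	bitmap_set(map, 10, 20);	// set bits 10..29
 *	bitmap_clear(map, 15, 5);	// clear bits 15..19 again
 *	// bitmap_weight(map, 100) == 15
 */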

static __always_inline
void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			unsigned int shift, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
	else
		__bitmap_shift_right(dst, src, shift, nbits);
}

static __always_inline
void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
		       unsigned int shift, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
	else
		__bitmap_shift_left(dst, src, shift, nbits);
}

static __always_inline
void bitmap_replace(unsigned long *dst,
		    const unsigned long *old,
		    const unsigned long *new,
		    const unsigned long *mask,
		    unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = (*old & ~(*mask)) | (*new & *mask);
	else
		__bitmap_replace(dst, old, new, mask, nbits);
}

/**
 * bitmap_scatter - Scatter a bitmap according to the given mask
 * @dst: scattered bitmap
 * @src: gathered bitmap
 * @mask: mask representing bits to assign to in the scattered bitmap
 * @nbits: number of bits in each of these bitmaps
 *
 * Scatters bitmap with sequential bits according to the given @mask.
 *
 * Example:
 * If @src bitmap = 0x005a, with @mask = 0x1313, @dst will be 0x0302.
 *
 * Or in binary form
 * @src			@mask			@dst
 * 0000000001011010	0001001100010011	0000001100000010
 *
 * (Bits 0, 1, 2, 3, 4, 5 are copied to the bits 0, 1, 4, 8, 9, 12)
 *
 * A more 'visual' description of the operation::
 *
 *	src:  0000000001011010
 *	                ||||||
 *	         +------+|||||
 *	         |  +----+||||
 *	         |  |+----+|||
 *	         |  ||   +-+||
 *	         |  ||   |  ||
 *	mask: ...v..vv...v..vv
 *	      ...0..11...0..10
 *	dst:  0000001100000010
 *
 * A relationship exists between bitmap_scatter() and bitmap_gather(). See
 * bitmap_gather() for the bitmap gather detailed operations. TL;DR:
 * bitmap_gather() can be seen as the 'reverse' bitmap_scatter() operation.
 */
static __always_inline
void bitmap_scatter(unsigned long *dst, const unsigned long *src,
		    const unsigned long *mask, unsigned int nbits)
{
	unsigned int n = 0;
	unsigned int bit;

	bitmap_zero(dst, nbits);

	for_each_set_bit(bit, mask, nbits)
		__assign_bit(bit, dst, test_bit(n++, src));
}

/**
 * bitmap_gather - Gather a bitmap according to given mask
 * @dst: gathered bitmap
 * @src: scattered bitmap
 * @mask: mask representing bits to extract from in the scattered bitmap
 * @nbits: number of bits in each of these bitmaps
 *
 * Gathers bitmap with sparse bits according to the given @mask.
 *
 * Example:
 * If @src bitmap = 0x0302, with @mask = 0x1313, @dst will be 0x001a.
 *
 * Or in binary form
 * @src			@mask			@dst
 * 0000001100000010	0001001100010011	0000000000011010
 *
 * (Bits 0, 1, 4, 8, 9, 12 are copied to the bits 0, 1, 2, 3, 4, 5)
 *
 * A more 'visual' description of the operation::
 *
 *	mask: ...v..vv...v..vv
 *	src:  0000001100000010
 *	         ^  ^^   ^   0
 *	         |  ||   |  10
 *	         |  ||   > 010
 *	         |  |+--> 1010
 *	         |  +--> 11010
 *	         +----> 011010
 *	dst:  0000000000011010
 *
 * A relationship exists between bitmap_gather() and bitmap_scatter(). See
 * bitmap_scatter() for the bitmap scatter detailed operations. TL;DR:
 * bitmap_scatter() can be seen as the 'reverse' bitmap_gather() operation.
 *
 * Suppose scattered is computed using bitmap_scatter(scattered, src, mask, n).
 * The operation bitmap_gather(result, scattered, mask, n) leads to a result
 * equal or equivalent to src.
 *
 * The result can be 'equivalent' because bitmap_scatter() and bitmap_gather()
 * are not bijective.
 * The result and src values are equivalent in the sense that a call to
 * bitmap_scatter(res, src, mask, n) and a call to
 * bitmap_scatter(res, result, mask, n) will lead to the same res value.
 */
static __always_inline
void bitmap_gather(unsigned long *dst, const unsigned long *src,
		   const unsigned long *mask, unsigned int nbits)
{
	unsigned int n = 0;
	unsigned int bit;

	bitmap_zero(dst, nbits);

	for_each_set_bit(bit, mask, nbits)
		__assign_bit(n++, dst, test_bit(bit, src));
}
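
/*
 * Illustrative sketch (editorial addition, not upstream documentation):
 * round-tripping the documented example values through bitmap_scatter() and
 * bitmap_gather(). Note that dense starts as 0x005a but comes back as 0x001a,
 * which is the "equivalent rather than equal" behaviour described above:
 * bit 6 of the original value has no slot in the 6-bit mask. "dense",
 * "sparse" and "mask" are placeholder names.
 *
 *	DECLARE_BITMAP(dense, 64);
 *	DECLARE_BITMAP(sparse, 64);
 *	DECLARE_BITMAP(mask, 64);
 *
 *	bitmap_from_u64(mask, 0x1313);			// bits 0, 1, 4, 8, 9, 12
 *	bitmap_from_u64(dense, 0x005a);
 *	bitmap_scatter(sparse, dense, mask, 64);	// sparse now holds 0x0302
 *	bitmap_gather(dense, sparse, mask, 64);		// dense now holds 0x001a
 */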

static __always_inline
void bitmap_next_set_region(unsigned long *bitmap, unsigned int *rs,
			    unsigned int *re, unsigned int end)
{
	*rs = find_next_bit(bitmap, end, *rs);
	*re = find_next_zero_bit(bitmap, end, *rs + 1);
}

/**
 * bitmap_release_region - release allocated bitmap region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @pos: beginning of bit region to release
 * @order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to __bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 */
static __always_inline
void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
{
	bitmap_clear(bitmap, pos, BIT(order));
}

/**
 * bitmap_allocate_region - allocate bitmap region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @pos: beginning of bit region to allocate
 * @order: region size (log base 2 of number of bits) to allocate
 *
 * Allocate (set bits in) a specified region of a bitmap.
 *
 * Returns: 0 on success, or %-EBUSY if specified region wasn't
 * free (not all bits were zero).
 */
static __always_inline
int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
{
	unsigned int len = BIT(order);

	if (find_next_bit(bitmap, pos + len, pos) < pos + len)
		return -EBUSY;
	bitmap_set(bitmap, pos, len);
	return 0;
}

/**
 * bitmap_find_free_region - find a contiguous aligned mem region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @bits: number of bits in the bitmap
 * @order: region size (log base 2 of number of bits) to find
 *
 * Find a region of free (zero) bits in a @bitmap of @bits bits and
 * allocate them (set them to one). Only consider regions of length
 * a power (@order) of two, aligned to that power of two, which
 * makes the search algorithm much faster.
 *
 * Returns: the bit offset in bitmap of the allocated region,
 * or -errno on failure.
 */
static __always_inline
int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
{
	unsigned int pos, end;		/* scans bitmap by regions of size order */

	for (pos = 0; (end = pos + BIT(order)) <= bits; pos = end) {
		if (!bitmap_allocate_region(bitmap, pos, order))
			return pos;
	}
	return -ENOMEM;
}

/**
 * BITMAP_FROM_U64() - Represent u64 value in the format suitable for bitmap.
 * @n: u64 value
 *
 * Linux bitmaps are internally arrays of unsigned longs, i.e. 32-bit
 * integers in 32-bit environment, and 64-bit integers in 64-bit one.
 *
 * There are four combinations of endianness and length of the word in linux
 * ABIs: LE64, BE64, LE32 and BE32.
 *
 * On 64-bit kernels 64-bit LE and BE numbers are naturally ordered in
 * bitmaps and therefore don't require any special handling.
 *
 * On 32-bit kernels 32-bit LE ABI orders lo word of 64-bit number in memory
 * prior to hi, and 32-bit BE orders hi word prior to lo. The bitmap on the
 * other hand is represented as an array of 32-bit words and the position of
 * bit N may therefore be calculated as: word #(N/32) and bit #(N%32) in that
 * word. For example, bit #42 is located at 10th position of 2nd word.
 * It matches 32-bit LE ABI, and we can simply let the compiler store 64-bit
 * values in memory as it usually does. But for BE we need to swap hi and lo
 * words manually.
 *
 * With all that, the macro BITMAP_FROM_U64() does explicit reordering of hi and
 * lo parts of u64.  For LE32 it does nothing, and for BE environment it swaps
 * hi and lo words, as is expected by bitmap.
 */
#if __BITS_PER_LONG == 64
#define BITMAP_FROM_U64(n) (n)
#else
#define BITMAP_FROM_U64(n) ((unsigned long) ((u64)(n) & ULONG_MAX)), \
				((unsigned long) ((u64)(n) >> 32))
#endif

/**
 * bitmap_from_u64 - Check and swap words within u64.
 * @mask: source bitmap
 * @dst: destination bitmap
 *
 * In 32-bit Big Endian kernel, when using ``(u32 *)(&val)[*]``
 * to read u64 mask, we will get the wrong word.
 * That is ``(u32 *)(&val)[0]`` gets the upper 32 bits,
 * but we expect the lower 32-bits of u64.
 */
static __always_inline void bitmap_from_u64(unsigned long *dst, u64 mask)
{
	bitmap_from_arr64(dst, &mask, 64);
}
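
/*
 * Illustrative sketch (editorial addition, not upstream documentation):
 * BITMAP_FROM_U64() is intended for static initializers, where it expands to
 * one array entry on 64-bit and to two (lo, hi) entries on 32-bit;
 * bitmap_from_u64() is the runtime counterpart. "init_mask" and "dst" are
 * placeholder names.
 *
 *	static const unsigned long init_mask[] = {
 *		BITMAP_FROM_U64(0x0000000100000002ULL),	// bits 1 and 32 set
 *	};
 *
 *	DECLARE_BITMAP(dst, 64);
 *	bitmap_from_u64(dst, 0x0000000100000002ULL);	// same result at runtime
 */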

/**
 * bitmap_read - read a value of n-bits from the memory region
 * @map: address to the bitmap memory region
 * @start: bit offset of the n-bit value
 * @nbits: size of value in bits, nonzero, up to BITS_PER_LONG
 *
 * Returns: value of @nbits bits located at the @start bit offset within the
 * @map memory region. For @nbits = 0 and @nbits > BITS_PER_LONG the return
 * value is undefined.
 */
static __always_inline
unsigned long bitmap_read(const unsigned long *map, unsigned long start, unsigned long nbits)
{
	size_t index = BIT_WORD(start);
	unsigned long offset = start % BITS_PER_LONG;
	unsigned long space = BITS_PER_LONG - offset;
	unsigned long value_low, value_high;

	if (unlikely(!nbits || nbits > BITS_PER_LONG))
		return 0;

	if (space >= nbits)
		return (map[index] >> offset) & BITMAP_LAST_WORD_MASK(nbits);

	value_low = map[index] & BITMAP_FIRST_WORD_MASK(start);
	value_high = map[index + 1] & BITMAP_LAST_WORD_MASK(start + nbits);
	return (value_low >> offset) | (value_high << space);
}

/**
 * bitmap_write - write n-bit value within a memory region
 * @map: address to the bitmap memory region
 * @value: value to write, clamped to nbits
 * @start: bit offset of the n-bit value
 * @nbits: size of value in bits, nonzero, up to BITS_PER_LONG.
 *
 * bitmap_write() behaves as-if implemented as @nbits calls of __assign_bit(),
 * i.e. bits beyond @nbits are ignored:
 *
 *   for (bit = 0; bit < nbits; bit++)
 *           __assign_bit(start + bit, bitmap, val & BIT(bit));
 *
 * For @nbits == 0 and @nbits > BITS_PER_LONG no writes are performed.
 */
static __always_inline
void bitmap_write(unsigned long *map, unsigned long value,
		  unsigned long start, unsigned long nbits)
{
	size_t index;
	unsigned long offset;
	unsigned long space;
	unsigned long mask;
	bool fit;

	if (unlikely(!nbits || nbits > BITS_PER_LONG))
		return;

	mask = BITMAP_LAST_WORD_MASK(nbits);
	value &= mask;
	offset = start % BITS_PER_LONG;
	space = BITS_PER_LONG - offset;
	fit = space >= nbits;
	index = BIT_WORD(start);

	map[index] &= (fit ? (~(mask << offset)) : ~BITMAP_FIRST_WORD_MASK(start));
	map[index] |= value << offset;
	if (fit)
		return;

	map[index + 1] &= BITMAP_FIRST_WORD_MASK(start + nbits);
	map[index + 1] |= (value >> space);
}

#define bitmap_get_value8(map, start)			\
	bitmap_read(map, start, BITS_PER_BYTE)
#define bitmap_set_value8(map, value, start)		\
	bitmap_write(map, value, start, BITS_PER_BYTE)

#endif /* __ASSEMBLY__ */

#endif /* __LINUX_BITMAP_H */