1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * lib/bitmap.c
4  * Helper functions for bitmap.h.
5  */
6 
7 #include <linux/bitmap.h>
8 #include <linux/bitops.h>
9 #include <linux/ctype.h>
10 #include <linux/device.h>
11 #include <linux/export.h>
12 #include <linux/slab.h>
13 
14 /**
15  * DOC: bitmap introduction
16  *
17  * bitmaps provide an array of bits, implemented using an
18  * array of unsigned longs.  The number of valid bits in a
19  * given bitmap does _not_ need to be an exact multiple of
20  * BITS_PER_LONG.
21  *
22  * The possible unused bits in the last, partially used word
23  * of a bitmap are 'don't care'.  The implementation makes
24  * no particular effort to keep them zero.  It ensures that
25  * their value will not affect the results of any operation.
26  * The bitmap operations that return Boolean (bitmap_empty,
27  * for example) or scalar (bitmap_weight, for example) results
28  * carefully prevent these unused bits from impacting their
29  * results.
30  *
31  * The byte ordering of bitmaps is more natural on little
32  * endian architectures.  See the big-endian headers
33  * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
34  * for the best explanations of this ordering.
35  */
36 
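/*
 * A small illustration of the "don't care" tail bits described above.
 * This is only a sketch; the bitmap name and length are made up for
 * the example:
 *
 *	DECLARE_BITMAP(map, 100);
 *	unsigned int w;
 *
 *	bitmap_fill(map, 100);
 *	w = bitmap_weight(map, 100);	// w == 100
 *
 * Even if bitmap_fill() also wrote the unused tail bits of the last
 * word, bitmap_weight() masks that word with BITMAP_LAST_WORD_MASK()
 * before counting, so the tail never contributes to the result.
 */
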
37 bool __bitmap_equal(const unsigned long *bitmap1,
38 		    const unsigned long *bitmap2, unsigned int bits)
39 {
40 	unsigned int k, lim = bits/BITS_PER_LONG;
41 	for (k = 0; k < lim; ++k)
42 		if (bitmap1[k] != bitmap2[k])
43 			return false;
44 
45 	if (bits % BITS_PER_LONG)
46 		if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
47 			return false;
48 
49 	return true;
50 }
51 EXPORT_SYMBOL(__bitmap_equal);
52 
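/*
 * __bitmap_or_equal() reports whether (@bitmap1 | @bitmap2) equals
 * @bitmap3 over the first @bits bits; tail bits beyond @bits are
 * ignored.  A hedged usage sketch, via the bitmap_or_equal() wrapper
 * from <linux/bitmap.h> (names are illustrative only):
 *
 *	DECLARE_BITMAP(pending, 128);
 *	DECLARE_BITMAP(extra, 128);
 *	DECLARE_BITMAP(all, 128);
 *
 *	if (bitmap_or_equal(pending, extra, all, 128))
 *		pr_debug("pending and extra together cover all\n");
 */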
53 bool __bitmap_or_equal(const unsigned long *bitmap1,
54 		       const unsigned long *bitmap2,
55 		       const unsigned long *bitmap3,
56 		       unsigned int bits)
57 {
58 	unsigned int k, lim = bits / BITS_PER_LONG;
59 	unsigned long tmp;
60 
61 	for (k = 0; k < lim; ++k) {
62 		if ((bitmap1[k] | bitmap2[k]) != bitmap3[k])
63 			return false;
64 	}
65 
66 	if (!(bits % BITS_PER_LONG))
67 		return true;
68 
69 	tmp = (bitmap1[k] | bitmap2[k]) ^ bitmap3[k];
70 	return (tmp & BITMAP_LAST_WORD_MASK(bits)) == 0;
71 }
72 
73 void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
74 {
75 	unsigned int k, lim = BITS_TO_LONGS(bits);
76 	for (k = 0; k < lim; ++k)
77 		dst[k] = ~src[k];
78 }
79 EXPORT_SYMBOL(__bitmap_complement);
80 
81 /**
82  * __bitmap_shift_right - logical right shift of the bits in a bitmap
83  *   @dst : destination bitmap
84  *   @src : source bitmap
85  *   @shift : shift by this many bits
86  *   @nbits : bitmap size, in bits
87  *
88  * Shifting right (dividing) means moving bits in the MS -> LS bit
89  * direction.  Zeros are fed into the vacated MS positions and the
90  * LS bits shifted off the bottom are lost.
91  */
92 void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
93 			unsigned shift, unsigned nbits)
94 {
95 	unsigned k, lim = BITS_TO_LONGS(nbits);
96 	unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
97 	unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);
98 	for (k = 0; off + k < lim; ++k) {
99 		unsigned long upper, lower;
100 
101 		/*
102 		 * If shift is not word aligned, take lower rem bits of
103 		 * word above and make them the top rem bits of result.
104 		 */
105 		if (!rem || off + k + 1 >= lim)
106 			upper = 0;
107 		else {
108 			upper = src[off + k + 1];
109 			if (off + k + 1 == lim - 1)
110 				upper &= mask;
111 			upper <<= (BITS_PER_LONG - rem);
112 		}
113 		lower = src[off + k];
114 		if (off + k == lim - 1)
115 			lower &= mask;
116 		lower >>= rem;
117 		dst[k] = lower | upper;
118 	}
119 	if (off)
120 		memset(&dst[lim - off], 0, off*sizeof(unsigned long));
121 }
122 EXPORT_SYMBOL(__bitmap_shift_right);
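
/*
 * A usage sketch for the bitmap_shift_right() wrapper built on the
 * helper above; the names and sizes are illustrative only:
 *
 *	DECLARE_BITMAP(src, 128);
 *	DECLARE_BITMAP(dst, 128);
 *
 *	bitmap_zero(src, 128);
 *	bitmap_set(src, 70, 4);			// bits 70-73
 *	bitmap_shift_right(dst, src, 64, 128);	// dst has bits 6-9 set
 *
 * Bits that would move below bit 0 are discarded and the vacated
 * most-significant positions are filled with zeros, as described above.
 */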
123 
124 
125 /**
126  * __bitmap_shift_left - logical left shift of the bits in a bitmap
127  *   @dst : destination bitmap
128  *   @src : source bitmap
129  *   @shift : shift by this many bits
130  *   @nbits : bitmap size, in bits
131  *
132  * Shifting left (multiplying) means moving bits in the LS -> MS
133  * direction.  Zeros are fed into the vacated LS bit positions
134  * and those MS bits shifted off the top are lost.
135  */
136 
137 void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
138 			unsigned int shift, unsigned int nbits)
139 {
140 	int k;
141 	unsigned int lim = BITS_TO_LONGS(nbits);
142 	unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
143 	for (k = lim - off - 1; k >= 0; --k) {
144 		unsigned long upper, lower;
145 
146 		/*
147 		 * If shift is not word aligned, take upper rem bits of
148 		 * word below and make them the bottom rem bits of result.
149 		 */
150 		if (rem && k > 0)
151 			lower = src[k - 1] >> (BITS_PER_LONG - rem);
152 		else
153 			lower = 0;
154 		upper = src[k] << rem;
155 		dst[k + off] = lower | upper;
156 	}
157 	if (off)
158 		memset(dst, 0, off*sizeof(unsigned long));
159 }
160 EXPORT_SYMBOL(__bitmap_shift_left);
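
/*
 * The symmetric sketch for bitmap_shift_left() (again, illustrative
 * names and sizes):
 *
 *	DECLARE_BITMAP(src, 128);
 *	DECLARE_BITMAP(dst, 128);
 *
 *	bitmap_zero(src, 128);
 *	bitmap_set(src, 6, 4);			// bits 6-9
 *	bitmap_shift_left(dst, src, 64, 128);	// dst has bits 70-73 set
 *
 * Bits pushed past bit 127 would simply be lost.
 */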
161 
162 /**
163  * bitmap_cut() - remove bit region from bitmap and right shift remaining bits
164  * @dst: destination bitmap, might overlap with src
165  * @src: source bitmap
166  * @first: start bit of region to be removed
167  * @cut: number of bits to remove
168  * @nbits: bitmap size, in bits
169  *
170  * Set the n-th bit of @dst iff the n-th bit of @src is set and
171  * n is less than @first, or the m-th bit of @src is set for some
172  * m such that @first <= n < @nbits and m = n + @cut.
173  *
174  * In pictures, example for a big-endian 32-bit architecture:
175  *
176  * The @src bitmap is::
177  *
178  *   31                                   63
179  *   |                                    |
180  *   10000000 11000001 11110010 00010101  10000000 11000001 01110010 00010101
181  *                   |  |              |                                    |
182  *                  16  14             0                                   32
183  *
184  * if @cut is 3, and @first is 14, bits 14-16 in @src are cut and @dst is::
185  *
186  *   31                                   63
187  *   |                                    |
188  *   10110000 00011000 00110010 00010101  00010000 00011000 00101110 01000010
189  *                      |              |                                    |
190  *                      14 (bit 17     0                                   32
191  *                          from @src)
192  *
193  * Note that @dst and @src might overlap partially or entirely.
194  *
195  * This is implemented in the obvious way, with a shift and carry
196  * step for each moved bit. Optimisation is left as an exercise
197  * for the compiler.
198  */
199 void bitmap_cut(unsigned long *dst, const unsigned long *src,
200 		unsigned int first, unsigned int cut, unsigned int nbits)
201 {
202 	unsigned int len = BITS_TO_LONGS(nbits);
203 	unsigned long keep = 0, carry;
204 	int i;
205 
206 	if (first % BITS_PER_LONG) {
207 		keep = src[first / BITS_PER_LONG] &
208 		       (~0UL >> (BITS_PER_LONG - first % BITS_PER_LONG));
209 	}
210 
211 	memmove(dst, src, len * sizeof(*dst));
212 
213 	while (cut--) {
214 		for (i = first / BITS_PER_LONG; i < len; i++) {
215 			if (i < len - 1)
216 				carry = dst[i + 1] & 1UL;
217 			else
218 				carry = 0;
219 
220 			dst[i] = (dst[i] >> 1) | (carry << (BITS_PER_LONG - 1));
221 		}
222 	}
223 
224 	dst[first / BITS_PER_LONG] &= ~0UL << (first % BITS_PER_LONG);
225 	dst[first / BITS_PER_LONG] |= keep;
226 }
227 EXPORT_SYMBOL(bitmap_cut);
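
/*
 * A sketch matching the example in the comment above; the bitmap name
 * is illustrative, and @dst and @src are deliberately the same:
 *
 *	DECLARE_BITMAP(map, 64);
 *
 *	bitmap_cut(map, map, 14, 3, 64);
 *
 * Bits 0-13 keep their values, bits 14-16 are removed, and what used
 * to be bits 17-63 now lives in bits 14-60; the freed top bits are
 * cleared.
 */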
228 
229 bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
230 				const unsigned long *bitmap2, unsigned int bits)
231 {
232 	unsigned int k;
233 	unsigned int lim = bits/BITS_PER_LONG;
234 	unsigned long result = 0;
235 
236 	for (k = 0; k < lim; k++)
237 		result |= (dst[k] = bitmap1[k] & bitmap2[k]);
238 	if (bits % BITS_PER_LONG)
239 		result |= (dst[k] = bitmap1[k] & bitmap2[k] &
240 			   BITMAP_LAST_WORD_MASK(bits));
241 	return result != 0;
242 }
243 EXPORT_SYMBOL(__bitmap_and);
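
/*
 * The boolean return value reports whether the (masked) result is
 * non-empty, so callers can combine the AND with the emptiness check.
 * A hedged sketch using the bitmap_and() wrapper; the names, and the
 * process() helper, are purely illustrative:
 *
 *	DECLARE_BITMAP(ready, 256);
 *	DECLARE_BITMAP(allowed, 256);
 *	DECLARE_BITMAP(usable, 256);
 *
 *	if (bitmap_and(usable, ready, allowed, 256))
 *		process(usable);
 */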
244 
245 void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
246 				const unsigned long *bitmap2, unsigned int bits)
247 {
248 	unsigned int k;
249 	unsigned int nr = BITS_TO_LONGS(bits);
250 
251 	for (k = 0; k < nr; k++)
252 		dst[k] = bitmap1[k] | bitmap2[k];
253 }
254 EXPORT_SYMBOL(__bitmap_or);
255 
256 void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
257 				const unsigned long *bitmap2, unsigned int bits)
258 {
259 	unsigned int k;
260 	unsigned int nr = BITS_TO_LONGS(bits);
261 
262 	for (k = 0; k < nr; k++)
263 		dst[k] = bitmap1[k] ^ bitmap2[k];
264 }
265 EXPORT_SYMBOL(__bitmap_xor);
266 
267 bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
268 				const unsigned long *bitmap2, unsigned int bits)
269 {
270 	unsigned int k;
271 	unsigned int lim = bits/BITS_PER_LONG;
272 	unsigned long result = 0;
273 
274 	for (k = 0; k < lim; k++)
275 		result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
276 	if (bits % BITS_PER_LONG)
277 		result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
278 			   BITMAP_LAST_WORD_MASK(bits));
279 	return result != 0;
280 }
281 EXPORT_SYMBOL(__bitmap_andnot);
282 
283 void __bitmap_replace(unsigned long *dst,
284 		      const unsigned long *old, const unsigned long *new,
285 		      const unsigned long *mask, unsigned int nbits)
286 {
287 	unsigned int k;
288 	unsigned int nr = BITS_TO_LONGS(nbits);
289 
290 	for (k = 0; k < nr; k++)
291 		dst[k] = (old[k] & ~mask[k]) | (new[k] & mask[k]);
292 }
293 EXPORT_SYMBOL(__bitmap_replace);
294 
295 bool __bitmap_intersects(const unsigned long *bitmap1,
296 			 const unsigned long *bitmap2, unsigned int bits)
297 {
298 	unsigned int k, lim = bits/BITS_PER_LONG;
299 	for (k = 0; k < lim; ++k)
300 		if (bitmap1[k] & bitmap2[k])
301 			return true;
302 
303 	if (bits % BITS_PER_LONG)
304 		if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
305 			return true;
306 	return false;
307 }
308 EXPORT_SYMBOL(__bitmap_intersects);
309 
310 bool __bitmap_subset(const unsigned long *bitmap1,
311 		     const unsigned long *bitmap2, unsigned int bits)
312 {
313 	unsigned int k, lim = bits/BITS_PER_LONG;
314 	for (k = 0; k < lim; ++k)
315 		if (bitmap1[k] & ~bitmap2[k])
316 			return false;
317 
318 	if (bits % BITS_PER_LONG)
319 		if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
320 			return false;
321 	return true;
322 }
323 EXPORT_SYMBOL(__bitmap_subset);
324 
325 #define BITMAP_WEIGHT(FETCH, bits)	\
326 ({										\
327 	unsigned int __bits = (bits), idx, w = 0;				\
328 										\
329 	for (idx = 0; idx < __bits / BITS_PER_LONG; idx++)			\
330 		w += hweight_long(FETCH);					\
331 										\
332 	if (__bits % BITS_PER_LONG)						\
333 		w += hweight_long((FETCH) & BITMAP_LAST_WORD_MASK(__bits));	\
334 										\
335 	w;									\
336 })
337 
338 unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
339 {
340 	return BITMAP_WEIGHT(bitmap[idx], bits);
341 }
342 EXPORT_SYMBOL(__bitmap_weight);
343 
344 unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
345 				const unsigned long *bitmap2, unsigned int bits)
346 {
347 	return BITMAP_WEIGHT(bitmap1[idx] & bitmap2[idx], bits);
348 }
349 EXPORT_SYMBOL(__bitmap_weight_and);
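
/*
 * Both helpers above sit behind the bitmap_weight() and
 * bitmap_weight_and() wrappers in <linux/bitmap.h>.  A small sketch
 * (illustrative name and numbers):
 *
 *	DECLARE_BITMAP(map, 100);
 *	unsigned int w;
 *
 *	bitmap_zero(map, 100);
 *	bitmap_set(map, 10, 5);
 *	w = bitmap_weight(map, 100);	// w == 5
 *
 * As with the other scalar results, the unused tail bits of the last
 * word are masked off before counting.
 */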
350 
351 void __bitmap_set(unsigned long *map, unsigned int start, int len)
352 {
353 	unsigned long *p = map + BIT_WORD(start);
354 	const unsigned int size = start + len;
355 	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
356 	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);
357 
358 	while (len - bits_to_set >= 0) {
359 		*p |= mask_to_set;
360 		len -= bits_to_set;
361 		bits_to_set = BITS_PER_LONG;
362 		mask_to_set = ~0UL;
363 		p++;
364 	}
365 	if (len) {
366 		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
367 		*p |= mask_to_set;
368 	}
369 }
370 EXPORT_SYMBOL(__bitmap_set);
371 
372 void __bitmap_clear(unsigned long *map, unsigned int start, int len)
373 {
374 	unsigned long *p = map + BIT_WORD(start);
375 	const unsigned int size = start + len;
376 	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
377 	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
378 
379 	while (len - bits_to_clear >= 0) {
380 		*p &= ~mask_to_clear;
381 		len -= bits_to_clear;
382 		bits_to_clear = BITS_PER_LONG;
383 		mask_to_clear = ~0UL;
384 		p++;
385 	}
386 	if (len) {
387 		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
388 		*p &= ~mask_to_clear;
389 	}
390 }
391 EXPORT_SYMBOL(__bitmap_clear);
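
/*
 * __bitmap_set() and __bitmap_clear() back the bitmap_set() and
 * bitmap_clear() wrappers.  A sketch of a range crossing a word
 * boundary (illustrative name and numbers):
 *
 *	DECLARE_BITMAP(map, 128);
 *
 *	bitmap_zero(map, 128);
 *	bitmap_set(map, 60, 10);	// sets bits 60-69, two words on 64-bit
 *	bitmap_clear(map, 62, 2);	// clears bits 62 and 63 again
 */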
392 
393 /**
394  * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
395  * @map: The address to base the search on
396  * @size: The bitmap size in bits
397  * @start: The bitnumber to start searching at
398  * @nr: The number of zeroed bits we're looking for
399  * @align_mask: Alignment mask for zero area
400  * @align_offset: Alignment offset for zero area.
401  *
402  * The @align_mask should be one less than a power of 2; the effect is that
403  * the bit offset of all zero areas this function finds plus @align_offset
404  * is a multiple of that power of 2.
405  */
406 unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
407 					     unsigned long size,
408 					     unsigned long start,
409 					     unsigned int nr,
410 					     unsigned long align_mask,
411 					     unsigned long align_offset)
412 {
413 	unsigned long index, end, i;
414 again:
415 	index = find_next_zero_bit(map, size, start);
416 
417 	/* Align allocation */
418 	index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;
419 
420 	end = index + nr;
421 	if (end > size)
422 		return end;
423 	i = find_next_bit(map, end, index);
424 	if (i < end) {
425 		start = i + 1;
426 		goto again;
427 	}
428 	return index;
429 }
430 EXPORT_SYMBOL(bitmap_find_next_zero_area_off);
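
/*
 * A sketch of the usual caller pattern, via the
 * bitmap_find_next_zero_area() wrapper (which passes @align_offset = 0);
 * the names, sizes and error handling are illustrative only:
 *
 *	DECLARE_BITMAP(map, 1024);
 *	unsigned long pos;
 *
 *	pos = bitmap_find_next_zero_area(map, 1024, 0, 16, 7);
 *	if (pos >= 1024)
 *		return -ENOMEM;		// no 8-aligned run of 16 zero bits
 *	bitmap_set(map, pos, 16);	// reserve the region
 *
 * The align_mask of 7 asks for a region whose start is a multiple of 8;
 * on failure the function returns a value beyond @size, which is why
 * the caller checks pos against 1024.
 */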
431 
432 /**
433  * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
434  *	@buf: pointer to a bitmap
435  *	@pos: a bit position in @buf (0 <= @pos < @nbits)
436  *	@nbits: number of valid bit positions in @buf
437  *
438  * Map the bit at position @pos in @buf (of length @nbits) to the
439  * ordinal of which set bit it is.  If it is not set or if @pos
440  * is not a valid bit position, map to -1.
441  *
442  * If, for example, just bits 4 through 7 are set in @buf, then @pos
443  * values 4 through 7 will get mapped to 0 through 3, respectively,
444  * and other @pos values will get mapped to -1.  When @pos value 7
445  * gets mapped to (returns) ordinal value 3 in this example, that means
446  * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
447  *
448  * The bit positions 0 through @nbits - 1 are valid positions in @buf.
449  */
450 static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
451 {
452 	if (pos >= nbits || !test_bit(pos, buf))
453 		return -1;
454 
455 	return bitmap_weight(buf, pos);
456 }
457 
458 /**
459  * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
460  *	@dst: remapped result
461  *	@src: subset to be remapped
462  *	@old: defines domain of map
463  *	@new: defines range of map
464  *	@nbits: number of bits in each of these bitmaps
465  *
466  * Let @old and @new define a mapping of bit positions, such that
467  * whatever position is held by the n-th set bit in @old is mapped
468  * to the n-th set bit in @new.  In the more general case, allowing
469  * for the possibility that the weight 'w' of @new is less than the
470  * weight of @old, map the position of the n-th set bit in @old to
471  * the position of the m-th set bit in @new, where m == n % w.
472  *
473  * If either of the @old and @new bitmaps is empty, or if @src and
474  * @dst point to the same location, then this routine copies @src
475  * to @dst.
476  *
477  * The positions of unset bits in @old are mapped to themselves
478  * (the identity map).
479  *
480  * Apply the above specified mapping to @src, placing the result in
481  * @dst, clearing any bits previously set in @dst.
482  *
483  * For example, let's say that @old has bits 4 through 7 set, and
484  * @new has bits 12 through 15 set.  This defines the mapping of bit
485  * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, leaving all other
486  * bit positions unchanged.  So if, say, @src comes into this routine
487  * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
488  * 13 and 15 set.
489  */
490 void bitmap_remap(unsigned long *dst, const unsigned long *src,
491 		const unsigned long *old, const unsigned long *new,
492 		unsigned int nbits)
493 {
494 	unsigned int oldbit, w;
495 
496 	if (dst == src)		/* following doesn't handle inplace remaps */
497 		return;
498 	bitmap_zero(dst, nbits);
499 
500 	w = bitmap_weight(new, nbits);
501 	for_each_set_bit(oldbit, src, nbits) {
502 		int n = bitmap_pos_to_ord(old, oldbit, nbits);
503 
504 		if (n < 0 || w == 0)
505 			set_bit(oldbit, dst);	/* identity map */
506 		else
507 			set_bit(find_nth_bit(new, nbits, n % w), dst);
508 	}
509 }
510 EXPORT_SYMBOL(bitmap_remap);
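
/*
 * A sketch of the example from the comment above (illustrative names):
 *
 *	DECLARE_BITMAP(domain, 32);
 *	DECLARE_BITMAP(range, 32);
 *	DECLARE_BITMAP(src, 32);
 *	DECLARE_BITMAP(dst, 32);
 *
 *	bitmap_zero(domain, 32);
 *	bitmap_zero(range, 32);
 *	bitmap_zero(src, 32);
 *	bitmap_set(domain, 4, 4);	// @old: bits 4-7
 *	bitmap_set(range, 12, 4);	// @new: bits 12-15
 *	bitmap_set(src, 1, 1);
 *	bitmap_set(src, 5, 1);
 *	bitmap_set(src, 7, 1);
 *	bitmap_remap(dst, src, domain, range, 32);
 *
 * Bit 1 is outside the domain and is identity-mapped, while bits 5 and
 * 7 map to bits 13 and 15, so dst ends up with bits 1, 13 and 15 set.
 */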
511 
512 /**
513  * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
514  *	@oldbit: bit position to be mapped
515  *	@old: defines domain of map
516  *	@new: defines range of map
517  *	@bits: number of bits in each of these bitmaps
518  *
519  * Let @old and @new define a mapping of bit positions, such that
520  * whatever position is held by the n-th set bit in @old is mapped
521  * to the n-th set bit in @new.  In the more general case, allowing
522  * for the possibility that the weight 'w' of @new is less than the
523  * weight of @old, map the position of the n-th set bit in @old to
524  * the position of the m-th set bit in @new, where m == n % w.
525  *
526  * The positions of unset bits in @old are mapped to themselves
527  * (the identity map).
528  *
529  * Apply the above specified mapping to bit position @oldbit, returning
530  * the new bit position.
531  *
532  * For example, let's say that @old has bits 4 through 7 set, and
533  * @new has bits 12 through 15 set.  This defines the mapping of bit
534  * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, leaving all other
535  * bit positions unchanged.  So if, say, @oldbit is 5, then this routine
536  * returns 13.
537  */
538 int bitmap_bitremap(int oldbit, const unsigned long *old,
539 				const unsigned long *new, int bits)
540 {
541 	int w = bitmap_weight(new, bits);
542 	int n = bitmap_pos_to_ord(old, oldbit, bits);
543 	if (n < 0 || w == 0)
544 		return oldbit;
545 	else
546 		return find_nth_bit(new, bits, n % w);
547 }
548 EXPORT_SYMBOL(bitmap_bitremap);
549 
550 #ifdef CONFIG_NUMA
551 /**
552  * bitmap_onto - translate one bitmap relative to another
553  *	@dst: resulting translated bitmap
554  * 	@orig: original untranslated bitmap
555  * 	@relmap: bitmap relative to which translated
556  *	@bits: number of bits in each of these bitmaps
557  *
558  * Set the n-th bit of @dst iff there exists some m such that the
559  * n-th bit of @relmap is set, the m-th bit of @orig is set, and
560  * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
561  * (If you understood the previous sentence the first time you
562  * read it, you're overqualified for your current job.)
563  *
564  * In other words, @orig is mapped onto (surjectively) @dst,
565  * using the map { <n, m> | the n-th bit of @relmap is the
566  * m-th set bit of @relmap }.
567  *
568  * Any set bits in @orig above bit number W, where W is the
569  * weight of (number of set bits in) @relmap, are mapped nowhere.
570  * In particular, if for all bits m set in @orig, m >= W, then
571  * @dst will end up empty.  In situations where the possibility
572  * of such an empty result is not desired, one way to avoid it is
573  * to use the bitmap_fold() operator, below, to first fold the
574  * @orig bitmap over itself so that all its set bits x are in the
575  * range 0 <= x < W.  The bitmap_fold() operator does this by
576  * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
577  *
578  * Example [1] for bitmap_onto():
579  *  Let's say @relmap has bits 30-39 set, and @orig has bits
580  *  1, 3, 5, 7, 9 and 11 set.  Then on return from this routine,
581  *  @dst will have bits 31, 33, 35, 37 and 39 set.
582  *
583  *  When bit 0 is set in @orig, it means turn on the bit in
584  *  @dst corresponding to whatever is the first bit (if any)
585  *  that is turned on in @relmap.  Since bit 0 was off in the
586  *  above example, we leave off that bit (bit 30) in @dst.
587  *
588  *  When bit 1 is set in @orig (as in the above example), it
589  *  means turn on the bit in @dst corresponding to whatever
590  *  is the second bit that is turned on in @relmap.  The second
591  *  bit in @relmap that was turned on in the above example was
592  *  bit 31, so we turned on bit 31 in @dst.
593  *
594  *  Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
595  *  because they were the 4th, 6th, 8th and 10th set bits
596  *  in @relmap, and the 4th, 6th, 8th and 10th bits of
597  *  @orig (i.e. bits 3, 5, 7 and 9) were also set.
598  *
599  *  When bit 11 is set in @orig, it means turn on the bit in
600  *  @dst corresponding to whatever is the twelfth bit that is
601  *  turned on in @relmap.  In the above example, there were
602  *  only ten bits turned on in @relmap (30..39), so the fact that
603  *  bit 11 was set in @orig had no effect on @dst.
604  *
605  * Example [2] for bitmap_fold() + bitmap_onto():
606  *  Let's say @relmap has these ten bits set::
607  *
608  *		40 41 42 43 45 48 53 61 74 95
609  *
610  *  (for the curious, that's 40 plus the first ten distinct terms
611  *  of the Fibonacci sequence.)
612  *
613  *  Further, let's say we use the following code, invoking
614  *  bitmap_fold() then bitmap_onto(), as suggested above, to
615  *  avoid the possibility of an empty @dst result::
616  *
617  *	unsigned long *tmp;	// a temporary bitmap's bits
618  *
619  *	bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
620  *	bitmap_onto(dst, tmp, relmap, bits);
621  *
622  *  Then this table shows what various values of @dst would be, for
623  *  various @orig's.  I list the zero-based positions of each set bit.
624  *  The tmp column shows the intermediate result, as computed by
625  *  using bitmap_fold() to fold the @orig bitmap modulo ten
626  *  (the weight of @relmap):
627  *
628  *      =============== ============== =================
629  *      @orig           tmp            @dst
630  *      0                0             40
631  *      1                1             41
632  *      9                9             95
633  *      10               0             40 [#f1]_
634  *      1 3 5 7          1 3 5 7       41 43 48 61
635  *      0 1 2 3 4        0 1 2 3 4     40 41 42 43 45
636  *      0 9 18 27        0 9 8 7       40 61 74 95
637  *      0 10 20 30       0             40
638  *      0 11 22 33       0 1 2 3       40 41 42 43
639  *      0 12 24 36       0 2 4 6       40 42 45 53
640  *      78 102 211       1 2 8         41 42 74 [#f1]_
641  *      =============== ============== =================
642  *
643  * .. [#f1]
644  *
645  *     For these marked lines, if we hadn't first done bitmap_fold()
646  *     into tmp, then the @dst result would have been empty.
647  *
648  * If either of @orig or @relmap is empty (no set bits), then @dst
649  * will be returned empty.
650  *
651  * If (as explained above) the only set bits in @orig are in positions
652  * m where m >= W, (where W is the weight of @relmap) then @dst will
653  * once again be returned empty.
654  *
655  * All bits in @dst not set by the above rule are cleared.
656  */
657 void bitmap_onto(unsigned long *dst, const unsigned long *orig,
658 			const unsigned long *relmap, unsigned int bits)
659 {
660 	unsigned int n, m;	/* same meaning as in above comment */
661 
662 	if (dst == orig)	/* following doesn't handle inplace mappings */
663 		return;
664 	bitmap_zero(dst, bits);
665 
666 	/*
667 	 * The following code is a more efficient, but less
668 	 * obvious, equivalent to the loop:
669 	 *	for (m = 0; m < bitmap_weight(relmap, bits); m++) {
670 	 *		n = find_nth_bit(relmap, bits, m);
671 	 *		if (test_bit(m, orig))
672 	 *			set_bit(n, dst);
673 	 *	}
674 	 */
675 
676 	m = 0;
677 	for_each_set_bit(n, relmap, bits) {
678 		/* m == bitmap_pos_to_ord(relmap, n, bits) */
679 		if (test_bit(m, orig))
680 			set_bit(n, dst);
681 		m++;
682 	}
683 }
684 
685 /**
686  * bitmap_fold - fold larger bitmap into smaller, modulo specified size
687  *	@dst: resulting smaller bitmap
688  *	@orig: original larger bitmap
689  *	@sz: specified size
690  *	@nbits: number of bits in each of these bitmaps
691  *
692  * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
693  * Clear all other bits in @dst.  See further the comment and
694  * Example [2] for bitmap_onto() for why and how to use this.
695  */
696 void bitmap_fold(unsigned long *dst, const unsigned long *orig,
697 			unsigned int sz, unsigned int nbits)
698 {
699 	unsigned int oldbit;
700 
701 	if (dst == orig)	/* following doesn't handle inplace mappings */
702 		return;
703 	bitmap_zero(dst, nbits);
704 
705 	for_each_set_bit(oldbit, orig, nbits)
706 		set_bit(oldbit % sz, dst);
707 }
708 #endif /* CONFIG_NUMA */
709 
710 unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags)
711 {
712 	return kmalloc_array(BITS_TO_LONGS(nbits), sizeof(unsigned long),
713 			     flags);
714 }
715 EXPORT_SYMBOL(bitmap_alloc);
716 
717 unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags)
718 {
719 	return bitmap_alloc(nbits, flags | __GFP_ZERO);
720 }
721 EXPORT_SYMBOL(bitmap_zalloc);
722 
723 unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node)
724 {
725 	return kmalloc_array_node(BITS_TO_LONGS(nbits), sizeof(unsigned long),
726 				  flags, node);
727 }
728 EXPORT_SYMBOL(bitmap_alloc_node);
729 
730 unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node)
731 {
732 	return bitmap_alloc_node(nbits, flags | __GFP_ZERO, node);
733 }
734 EXPORT_SYMBOL(bitmap_zalloc_node);
735 
736 void bitmap_free(const unsigned long *bitmap)
737 {
738 	kfree(bitmap);
739 }
740 EXPORT_SYMBOL(bitmap_free);
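
/*
 * The usual allocation pattern, sketched with an illustrative @nbits;
 * bitmap_zalloc() hands back cleared bits, while plain bitmap_alloc()
 * leaves them uninitialized, so callers must bitmap_zero() or
 * bitmap_fill() themselves:
 *
 *	unsigned long *map;
 *
 *	map = bitmap_zalloc(1024, GFP_KERNEL);
 *	if (!map)
 *		return -ENOMEM;
 *	...
 *	bitmap_free(map);
 */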
741 
742 static void devm_bitmap_free(void *data)
743 {
744 	unsigned long *bitmap = data;
745 
746 	bitmap_free(bitmap);
747 }
748 
749 unsigned long *devm_bitmap_alloc(struct device *dev,
750 				 unsigned int nbits, gfp_t flags)
751 {
752 	unsigned long *bitmap;
753 	int ret;
754 
755 	bitmap = bitmap_alloc(nbits, flags);
756 	if (!bitmap)
757 		return NULL;
758 
759 	ret = devm_add_action_or_reset(dev, devm_bitmap_free, bitmap);
760 	if (ret)
761 		return NULL;
762 
763 	return bitmap;
764 }
765 EXPORT_SYMBOL_GPL(devm_bitmap_alloc);
766 
767 unsigned long *devm_bitmap_zalloc(struct device *dev,
768 				  unsigned int nbits, gfp_t flags)
769 {
770 	return devm_bitmap_alloc(dev, nbits, flags | __GFP_ZERO);
771 }
772 EXPORT_SYMBOL_GPL(devm_bitmap_zalloc);
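
/*
 * A sketch of the device-managed variant in a probe path; the driver
 * structure and field names here are made up for the example:
 *
 *	static int foo_probe(struct platform_device *pdev)
 *	{
 *		struct foo_priv *priv;
 *		...
 *		priv->irq_map = devm_bitmap_zalloc(&pdev->dev, 64, GFP_KERNEL);
 *		if (!priv->irq_map)
 *			return -ENOMEM;
 *		...
 *	}
 *
 * No matching bitmap_free() is needed: the devm action registered by
 * devm_bitmap_alloc() frees the bitmap when the device is unbound.
 */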
773 
774 #if BITS_PER_LONG == 64
775 /**
776  * bitmap_from_arr32 - copy the contents of u32 array of bits to bitmap
777  *	@bitmap: array of unsigned longs, the destination bitmap
778  *	@buf: array of u32 (in host byte order), the source bitmap
779  *	@nbits: number of bits in @bitmap
780  */
781 void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf, unsigned int nbits)
782 {
783 	unsigned int i, halfwords;
784 
785 	halfwords = DIV_ROUND_UP(nbits, 32);
786 	for (i = 0; i < halfwords; i++) {
787 		bitmap[i/2] = (unsigned long) buf[i];
788 		if (++i < halfwords)
789 			bitmap[i/2] |= ((unsigned long) buf[i]) << 32;
790 	}
791 
792 	/* Clear tail bits in last word beyond nbits. */
793 	if (nbits % BITS_PER_LONG)
794 		bitmap[(halfwords - 1) / 2] &= BITMAP_LAST_WORD_MASK(nbits);
795 }
796 EXPORT_SYMBOL(bitmap_from_arr32);
797 
798 /**
799  * bitmap_to_arr32 - copy the contents of bitmap to a u32 array of bits
800  *	@buf: array of u32 (in host byte order), the dest bitmap
801  *	@bitmap: array of unsigned longs, the source bitmap
802  *	@nbits: number of bits in @bitmap
803  */
804 void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap, unsigned int nbits)
805 {
806 	unsigned int i, halfwords;
807 
808 	halfwords = DIV_ROUND_UP(nbits, 32);
809 	for (i = 0; i < halfwords; i++) {
810 		buf[i] = (u32) (bitmap[i/2] & UINT_MAX);
811 		if (++i < halfwords)
812 			buf[i] = (u32) (bitmap[i/2] >> 32);
813 	}
814 
815 	/* Clear tail bits in last element of array beyond nbits. */
816 	if (nbits % BITS_PER_LONG)
817 		buf[halfwords - 1] &= (u32) (UINT_MAX >> ((-nbits) & 31));
818 }
819 EXPORT_SYMBOL(bitmap_to_arr32);
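
/*
 * A round-trip sketch for the conversion helpers above (illustrative
 * names; on 32-bit kernels these out-of-line versions are not built and
 * the helpers degenerate to a copy with the tail cleared):
 *
 *	u32 words[4];			// e.g. filled from a uAPI structure
 *	DECLARE_BITMAP(map, 100);
 *
 *	bitmap_from_arr32(map, words, 100);
 *	...
 *	bitmap_to_arr32(words, map, 100);
 *
 * Both directions clear the destination bits beyond bit 99, so the tail
 * of the last word or array element is always well defined.
 */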
820 #endif
821 
822 #if BITS_PER_LONG == 32
823 /**
824  * bitmap_from_arr64 - copy the contents of u64 array of bits to bitmap
825  *	@bitmap: array of unsigned longs, the destination bitmap
826  *	@buf: array of u64 (in host byte order), the source bitmap
827  *	@nbits: number of bits in @bitmap
828  */
829 void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits)
830 {
831 	int n;
832 
833 	for (n = nbits; n > 0; n -= 64) {
834 		u64 val = *buf++;
835 
836 		*bitmap++ = val;
837 		if (n > 32)
838 			*bitmap++ = val >> 32;
839 	}
840 
841 	/*
842 	 * Clear tail bits in the last word beyond nbits.
843 	 *
844 	 * Negative index is OK because here we point to the word next
845 	 * to the last word of the bitmap, except for nbits == 0, which
846 	 * is tested implicitly.
847 	 */
848 	if (nbits % BITS_PER_LONG)
849 		bitmap[-1] &= BITMAP_LAST_WORD_MASK(nbits);
850 }
851 EXPORT_SYMBOL(bitmap_from_arr64);
852 
853 /**
854  * bitmap_to_arr64 - copy the contents of bitmap to a u64 array of bits
855  *	@buf: array of u64 (in host byte order), the dest bitmap
856  *	@bitmap: array of unsigned longs, the source bitmap
857  *	@nbits: number of bits in @bitmap
858  */
859 void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits)
860 {
861 	const unsigned long *end = bitmap + BITS_TO_LONGS(nbits);
862 
863 	while (bitmap < end) {
864 		*buf = *bitmap++;
865 		if (bitmap < end)
866 			*buf |= (u64)(*bitmap++) << 32;
867 		buf++;
868 	}
869 
870 	/* Clear tail bits in the last element of array beyond nbits. */
871 	if (nbits % 64)
872 		buf[-1] &= GENMASK_ULL((nbits - 1) % 64, 0);
873 }
874 EXPORT_SYMBOL(bitmap_to_arr64);
875 #endif
876