xref: /freebsd/sys/compat/linuxkpi/common/include/linux/bitops.h (revision f6313575401b3e97469df997e8b9d1a18fb485d0)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * Copyright (c) 2013-2015 Mellanox Technologies, Ltd.
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  * 1. Redistributions of source code must retain the above copyright
12  *    notice unmodified, this list of conditions, and the following
13  *    disclaimer.
14  * 2. Redistributions in binary form must reproduce the above copyright
15  *    notice, this list of conditions and the following disclaimer in the
16  *    documentation and/or other materials provided with the distribution.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  *
29  * $FreeBSD$
30  */
31 #ifndef	_LINUX_BITOPS_H_
32 #define	_LINUX_BITOPS_H_
33 
34 #include <sys/param.h>
35 #include <sys/types.h>
36 #include <sys/systm.h>
37 #include <sys/errno.h>
38 
/*
 * Linux-compatible bit-manipulation macros.
 */
#define	BIT(nr)			(1UL << (nr))
#define	BIT_ULL(nr)		(1ULL << (nr))
/* Number of bits in a "long" for the current ABI. */
#ifdef __LP64__
#define	BITS_PER_LONG		64
#else
#define	BITS_PER_LONG		32
#endif
/* Mask of the bits at and above bit "start" within its word. */
#define	BITMAP_FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))
/* Mask of the low "n" bits of a word; "n" must be in 1..BITS_PER_LONG. */
#define	BITMAP_LAST_WORD_MASK(n)	(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to store an "n"-bit bitmap. */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* In-word mask and word index of bit "nr" in a multi-word bitmap. */
#define	BIT_MASK(nr)		(1UL << ((nr) & (BITS_PER_LONG - 1)))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
/* Contiguous mask covering bits "l" (low) through "h" (high), inclusive. */
#define	GENMASK(h, l)		(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))
#define BITS_PER_BYTE           8
53 
/*
 * Return the 0-based index of the least significant set bit in "mask".
 * Returns -1 when no bit is set (ffs() returns 0 in that case).
 */
static inline int
__ffs(int mask)
{
	int first;

	first = ffs(mask);
	return (first - 1);
}
59 
/*
 * Return the 0-based index of the most significant set bit in "mask".
 * Returns -1 when no bit is set (fls() returns 0 in that case).
 */
static inline int
__fls(int mask)
{
	int last;

	last = fls(mask);
	return (last - 1);
}
65 
/*
 * Return the 0-based index of the least significant set bit in the
 * long "mask".  Returns -1 when no bit is set.
 */
static inline int
__ffsl(long mask)
{
	int first;

	first = ffsl(mask);
	return (first - 1);
}
71 
/*
 * Return the 0-based index of the most significant set bit in the
 * long "mask".  Returns -1 when no bit is set.
 */
static inline int
__flsl(long mask)
{
	int last;

	last = flsl(mask);
	return (last - 1);
}
77 
/*
 * Rotate a 32-bit word right by "shift" bits.
 *
 * The shift counts are reduced modulo 32 (as in the Linux original) so
 * that shift == 0, or any multiple of 32, returns the word unchanged.
 * The previous form computed "word << (32 - shift)", which is a shift
 * by the full type width when shift == 0 — undefined behavior in C.
 */
static inline uint32_t
ror32(uint32_t word, unsigned int shift)
{

	return ((word >> (shift & 31)) | (word << ((-shift) & 31)));
}
84 
/* Find the first zero bit; undefined when "mask" has no zero bits. */
#define	ffz(mask)	__ffs(~(mask))
86 
/*
 * Return the order (base-2 logarithm, rounded up) of "count", i.e. the
 * exponent of the smallest power of two that is >= count.
 */
static inline int
get_count_order(unsigned int count)
{
	int order = fls(count) - 1;

	/* A non-power-of-two rounds up to the next order. */
	if ((count & (count - 1)) != 0)
		order++;
	return (order);
}
96 
97 static inline unsigned long
98 find_first_bit(const unsigned long *addr, unsigned long size)
99 {
100 	long mask;
101 	int bit;
102 
103 	for (bit = 0; size >= BITS_PER_LONG;
104 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
105 		if (*addr == 0)
106 			continue;
107 		return (bit + __ffsl(*addr));
108 	}
109 	if (size) {
110 		mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
111 		if (mask)
112 			bit += __ffsl(mask);
113 		else
114 			bit += size;
115 	}
116 	return (bit);
117 }
118 
119 static inline unsigned long
120 find_first_zero_bit(const unsigned long *addr, unsigned long size)
121 {
122 	long mask;
123 	int bit;
124 
125 	for (bit = 0; size >= BITS_PER_LONG;
126 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
127 		if (~(*addr) == 0)
128 			continue;
129 		return (bit + __ffsl(~(*addr)));
130 	}
131 	if (size) {
132 		mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
133 		if (mask)
134 			bit += __ffsl(mask);
135 		else
136 			bit += size;
137 	}
138 	return (bit);
139 }
140 
/*
 * Return the index of the highest set bit in the "size"-bit bitmap at
 * "addr", or "size" when no bit is set.
 */
static inline unsigned long
find_last_bit(const unsigned long *addr, unsigned long size)
{
	long mask;
	int offs;
	int bit;
	int pos;

	/* Split "size" into whole words ("pos") and a partial word ("offs"). */
	pos = size / BITS_PER_LONG;
	offs = size % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Check the partial last word first, masking off bits past "size". */
	if (offs) {
		mask = (*addr) & BITMAP_LAST_WORD_MASK(offs);
		if (mask)
			return (bit + __flsl(mask));
	}
	/* Walk the whole words from highest to lowest. */
	while (pos--) {
		addr--;
		bit -= BITS_PER_LONG;
		if (*addr)
			return (bit + __flsl(*addr));
	}
	return (size);
}
166 
/*
 * Return the index of the first set bit at or above "offset" in the
 * "size"-bit bitmap at "addr", or "size" when none is found.
 */
static inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size, unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Handle an offset in the middle of a word. */
	if (offs) {
		/* Ignore bits below "offs" in the first word. */
		mask = (*addr) & ~BITMAP_LAST_WORD_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		/* The range may end inside this same word. */
		if (size - bit <= BITS_PER_LONG)
			return (size);
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan the remaining whole words. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Check the trailing partial word, masking off bits past "size". */
	if (size) {
		mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
205 
/*
 * Return the index of the first clear bit at or above "offset" in the
 * "size"-bit bitmap at "addr", or "size" when none is found.
 */
static inline unsigned long
find_next_zero_bit(const unsigned long *addr, unsigned long size,
    unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Handle an offset in the middle of a word; invert so that a
	 * clear bit appears as a set bit. */
	if (offs) {
		mask = ~(*addr) & ~BITMAP_LAST_WORD_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		/* The range may end inside this same word. */
		if (size - bit <= BITS_PER_LONG)
			return (size);
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan the remaining whole words. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Check the trailing partial word, masking off bits past "size". */
	if (size) {
		mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
245 
/* Clear every bit of a "size"-bit bitmap. */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
254 
255 static inline void
256 bitmap_fill(unsigned long *addr, int size)
257 {
258 	int tail;
259 	int len;
260 
261 	len = (size / BITS_PER_LONG) * sizeof(long);
262 	memset(addr, 0xff, len);
263 	tail = size & (BITS_PER_LONG - 1);
264 	if (tail)
265 		addr[size / BITS_PER_LONG] = BITMAP_LAST_WORD_MASK(tail);
266 }
267 
268 static inline int
269 bitmap_full(unsigned long *addr, int size)
270 {
271 	unsigned long mask;
272 	int tail;
273 	int len;
274 	int i;
275 
276 	len = size / BITS_PER_LONG;
277 	for (i = 0; i < len; i++)
278 		if (addr[i] != ~0UL)
279 			return (0);
280 	tail = size & (BITS_PER_LONG - 1);
281 	if (tail) {
282 		mask = BITMAP_LAST_WORD_MASK(tail);
283 		if ((addr[i] & mask) != mask)
284 			return (0);
285 	}
286 	return (1);
287 }
288 
289 static inline int
290 bitmap_empty(unsigned long *addr, int size)
291 {
292 	unsigned long mask;
293 	int tail;
294 	int len;
295 	int i;
296 
297 	len = size / BITS_PER_LONG;
298 	for (i = 0; i < len; i++)
299 		if (addr[i] != 0)
300 			return (0);
301 	tail = size & (BITS_PER_LONG - 1);
302 	if (tail) {
303 		mask = BITMAP_LAST_WORD_MASK(tail);
304 		if ((addr[i] & mask) != 0)
305 			return (0);
306 	}
307 	return (1);
308 }
309 
/*
 * Atomic single-bit operations on a multi-word bitmap.
 *
 * NOTE(review): in Linux the double-underscore variants (__set_bit,
 * __clear_bit) are the non-atomic versions; here they alias the atomic
 * FreeBSD primitives, which is stronger than required but slower —
 * confirm this is intentional for the compat layer.
 */
#define	__set_bit(i, a)							\
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define	set_bit(i, a)							\
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define	__clear_bit(i, a)						\
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

/* Read bit "i" with acquire semantics; evaluates to 0 or 1. */
#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile unsigned long *)(a))[BIT_WORD(i)]) &	\
    BIT_MASK(i))
325 
/*
 * Atomically clear bit "bit" in the bitmap at "var" and return its
 * previous value (0 or 1).
 */
static inline int
test_and_clear_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += BIT_WORD(bit);
	bit %= BITS_PER_LONG;
	/* From here on "bit" holds the in-word mask, not the index. */
	bit = (1UL << bit);
	/* Retry the compare-and-set until no other CPU interferes. */
	do {
		val = *var;
	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);

	return !!(val & bit);
}
340 
341 static inline int
342 __test_and_clear_bit(long bit, volatile unsigned long *var)
343 {
344 	long val;
345 
346 	var += BIT_WORD(bit);
347 	bit %= BITS_PER_LONG;
348 	bit = (1UL << bit);
349 
350 	val = *var;
351 	*var &= ~bit;
352 
353 	return !!(val & bit);
354 }
355 
/*
 * Atomically set bit "bit" in the bitmap at "var" and return its
 * previous value (0 or 1).
 */
static inline int
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += BIT_WORD(bit);
	bit %= BITS_PER_LONG;
	/* From here on "bit" holds the in-word mask, not the index. */
	bit = (1UL << bit);
	/* Retry the compare-and-set until no other CPU interferes. */
	do {
		val = *var;
	} while (atomic_cmpset_long(var, val, val | bit) == 0);

	return !!(val & bit);
}
370 
371 static inline int
372 __test_and_set_bit(long bit, volatile unsigned long *var)
373 {
374 	long val;
375 
376 	var += BIT_WORD(bit);
377 	bit %= BITS_PER_LONG;
378 	bit = (1UL << bit);
379 
380 	val = *var;
381 	*var |= bit;
382 
383 	return !!(val & bit);
384 }
385 
/*
 * Set "nr" consecutive bits starting at bit "start" (non-atomic).
 */
static inline void
bitmap_set(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;		/* end of range, exclusive */
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	/* Set the (possibly partial) leading word, then whole words. */
	while (nr - bits_to_set >= 0) {
		*p |= mask_to_set;
		nr -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	/* Set the remaining bits of the trailing partial word. */
	if (nr) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
406 
/*
 * Clear "nr" consecutive bits starting at bit "start" (non-atomic).
 */
static inline void
bitmap_clear(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;		/* end of range, exclusive */
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	/* Clear the (possibly partial) leading word, then whole words. */
	while (nr - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		nr -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	/* Clear the remaining bits of the trailing partial word. */
	if (nr) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
427 
/* Operation selectors for __reg_op(). */
enum {
        REG_OP_ISFREE,		/* region is all zero bits? */
        REG_OP_ALLOC,		/* set all bits in region */
        REG_OP_RELEASE,		/* clear all bits in region */
};
433 
/*
 * Perform "reg_op" (a REG_OP_* value) on the naturally aligned region
 * of (1 << order) bits starting at bit "pos" of "bitmap".  Returns 1
 * for REG_OP_ISFREE when the region is entirely clear, 0 otherwise.
 */
static inline int
__reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
{
        int nbits_reg;          /* number of bits in the region */
        int index;              /* word index of the region's first long */
        int offset;             /* bit offset of the region in bitmap[index] */
        int nlongs_reg;         /* number of longs spanned by the region */
        int nbitsinlong;        /* bits of the region in each spanned long */
        unsigned long mask;     /* bitmask for one long of the region */
        int i;                  /* scans the bitmap by longs */
        int ret = 0;            /* return value */

        nbits_reg = 1 << order;
        index = pos / BITS_PER_LONG;
        offset = pos - (index * BITS_PER_LONG);
        nlongs_reg = BITS_TO_LONGS(nbits_reg);
        nbitsinlong = min(nbits_reg,  BITS_PER_LONG);

        /*
         * Build the in-word mask this way rather than as
         * "(1UL << nbitsinlong) - 1" so that nbitsinlong ==
         * BITS_PER_LONG does not shift by the full word width
         * (which would be undefined behavior).
         */
        mask = (1UL << (nbitsinlong - 1));
        mask += mask - 1;
        mask <<= offset;

        switch (reg_op) {
        case REG_OP_ISFREE:
                for (i = 0; i < nlongs_reg; i++) {
                        if (bitmap[index + i] & mask)
                                goto done;
                }
                ret = 1;        /* all bits in the region are clear */
                break;

        case REG_OP_ALLOC:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] |= mask;
                break;

        case REG_OP_RELEASE:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] &= ~mask;
                break;
        }
done:
        return ret;
}
478 
479 static inline int
480 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
481 {
482         int pos;
483         int end;
484 
485         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
486                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
487                         continue;
488                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
489                 return pos;
490         }
491         return -ENOMEM;
492 }
493 
494 static inline int
495 bitmap_allocate_region(unsigned long *bitmap, int pos, int order)
496 {
497         if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
498                 return -EBUSY;
499         __reg_op(bitmap, pos, order, REG_OP_ALLOC);
500         return 0;
501 }
502 
/* Clear the (1 << order)-bit region starting at bit "pos". */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
        __reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
508 
/* Iterate "bit" over each set bit in the first "size" bits of "addr". */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
513 
/* Return the number of set bits among the first "nbits" of "bitmap". */
static inline unsigned
bitmap_weight(unsigned long *bitmap, unsigned nbits)
{
	unsigned count = 0;
	unsigned pos;

	for (pos = find_first_bit(bitmap, nbits); pos < nbits;
	    pos = find_next_bit(bitmap, nbits, pos + 1))
		count++;
	return (count);
}
524 
525 static inline int
526 bitmap_equal(const unsigned long *pa,
527     const unsigned long *pb, unsigned bits)
528 {
529 	unsigned x;
530 	unsigned y = bits / BITS_PER_LONG;
531 
532 	for (x = 0; x != y; x++) {
533 		if (pa[x] != pb[x])
534 			return (0);
535 	}
536 
537 	y = bits % BITS_PER_LONG;
538 	if (y != 0) {
539 		if ((pa[x] ^ pb[x]) & BITMAP_LAST_WORD_MASK(y))
540 			return (0);
541 	}
542 	return (1);
543 }
544 
545 #endif	/* _LINUX_BITOPS_H_ */
546