xref: /freebsd/sys/compat/linuxkpi/common/include/linux/bitops.h (revision 23541160bb3e58f5deb04a299eda60fc80b731bc)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * Copyright (c) 2013-2015 Mellanox Technologies, Ltd.
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  * 1. Redistributions of source code must retain the above copyright
12  *    notice unmodified, this list of conditions, and the following
13  *    disclaimer.
14  * 2. Redistributions in binary form must reproduce the above copyright
15  *    notice, this list of conditions and the following disclaimer in the
16  *    documentation and/or other materials provided with the distribution.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  *
29  * $FreeBSD$
30  */
31 #ifndef	_LINUX_BITOPS_H_
32 #define	_LINUX_BITOPS_H_
33 
34 #include <sys/types.h>
35 #include <sys/systm.h>
36 #include <sys/errno.h>
37 
#define	BIT(nr)			(1UL << (nr))
#ifdef __LP64__
#define	BITS_PER_LONG		64
#else
#define	BITS_PER_LONG		32
#endif
/* Mask of the bits in a long at and above position (start % BITS_PER_LONG). */
#define	BITMAP_FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))
/*
 * Mask of the low (n) bits of a long.  Callers must ensure
 * 0 < (n) <= BITS_PER_LONG: (n) == 0 would shift by BITS_PER_LONG,
 * which is undefined behavior.
 */
#define	BITMAP_LAST_WORD_MASK(n)	(~0UL >> (BITS_PER_LONG - (n)))
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
#define	BIT_MASK(nr)		(1UL << ((nr) & (BITS_PER_LONG - 1)))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
/*
 * Contiguous mask covering bit positions lo..hi inclusive.
 * NOTE(review): argument order is (lo, hi) here, the reverse of
 * Linux's GENMASK(h, l) — verify call sites before changing.
 */
#define	GENMASK(lo, hi)		(((2UL << ((hi) - (lo))) - 1UL) << (lo))
#define BITS_PER_BYTE           8
51 
/* Index of the least significant set bit; ffs() is 1-based, this is 0-based. */
static inline int
__ffs(int value)
{
	return (ffs(value) - 1);
}
57 
/* Index of the most significant set bit; fls() is 1-based, this is 0-based. */
static inline int
__fls(int value)
{
	return (fls(value) - 1);
}
63 
/* Long variant of __ffs(): 0-based index of the lowest set bit. */
static inline int
__ffsl(long value)
{
	return (ffsl(value) - 1);
}
69 
/* Long variant of __fls(): 0-based index of the highest set bit. */
static inline int
__flsl(long value)
{
	return (flsl(value) - 1);
}
75 
76 
/* Index of the first zero bit in "mask" (the first set bit of ~mask). */
#define	ffz(mask)	__ffs(~(mask))
78 
/*
 * Smallest order such that (1 << order) >= count, i.e. ceil(log2(count)).
 * Note: fls(0) is 0, so count == 0 yields -1.
 */
static inline int get_count_order(unsigned int count)
{
	int order = fls(count) - 1;

	/* Round up unless count is an exact power of two. */
	if (count & (count - 1))
		order++;
	return (order);
}
88 
/*
 * Return the index of the first (lowest) set bit within the first
 * "size" bits of the bitmap at "addr", or "size" if no bit is set.
 */
static inline unsigned long
find_first_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole longs; the first non-zero word holds the answer. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Check the trailing partial word, ignoring bits at index >= size. */
	if (size) {
		mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
110 
/*
 * Return the index of the first clear (zero) bit within the first
 * "size" bits of the bitmap at "addr", or "size" if every bit is set.
 */
static inline unsigned long
find_first_zero_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole longs; a word whose complement is non-zero has a 0 bit. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Check the trailing partial word, ignoring bits at index >= size. */
	if (size) {
		mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
132 
133 static inline unsigned long
134 find_last_bit(unsigned long *addr, unsigned long size)
135 {
136 	long mask;
137 	int offs;
138 	int bit;
139 	int pos;
140 
141 	pos = size / BITS_PER_LONG;
142 	offs = size % BITS_PER_LONG;
143 	bit = BITS_PER_LONG * pos;
144 	addr += pos;
145 	if (offs) {
146 		mask = (*addr) & BITMAP_LAST_WORD_MASK(offs);
147 		if (mask)
148 			return (bit + __flsl(mask));
149 	}
150 	while (--pos) {
151 		addr--;
152 		bit -= BITS_PER_LONG;
153 		if (*addr)
154 			return (bit + __flsl(mask));
155 	}
156 	return (size);
157 }
158 
/*
 * Return the index of the first set bit at position >= "offset" within
 * the first "size" bits of the bitmap, or a value >= "size" if none.
 * NOTE(review): when the word containing "offset" has a set bit whose
 * index is >= size, that index is returned as-is, so the result can
 * exceed "size"; callers must compare with "< size" as
 * for_each_set_bit() does.
 */
static inline unsigned long
find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		/* Mask off bits below "offset" in the starting word. */
		mask = (*addr) & ~BITMAP_LAST_WORD_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		if (size - bit <= BITS_PER_LONG)
			return (size);
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan the remaining whole longs. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Trailing partial word, ignoring bits at index >= size. */
	if (size) {
		mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
197 
/*
 * Return the index of the first clear bit at position >= "offset"
 * within the first "size" bits of the bitmap, or a value >= "size" if
 * none.  NOTE(review): like find_next_bit(), a clear bit found in the
 * starting word at an index >= size is returned as-is, so the result
 * can exceed "size"; callers must compare with "< size".
 */
static inline unsigned long
find_next_zero_bit(unsigned long *addr, unsigned long size,
    unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		/* Complement the word, then mask off bits below "offset". */
		mask = ~(*addr) & ~BITMAP_LAST_WORD_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		if (size - bit <= BITS_PER_LONG)
			return (size);
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan the remaining whole longs for a word with a zero bit. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Trailing partial word, ignoring bits at index >= size. */
	if (size) {
		mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
237 
/* Clear all "size" bits (rounded up to whole longs) of the bitmap. */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
246 
247 static inline void
248 bitmap_fill(unsigned long *addr, int size)
249 {
250 	int tail;
251 	int len;
252 
253 	len = (size / BITS_PER_LONG) * sizeof(long);
254 	memset(addr, 0xff, len);
255 	tail = size & (BITS_PER_LONG - 1);
256 	if (tail)
257 		addr[size / BITS_PER_LONG] = BITMAP_LAST_WORD_MASK(tail);
258 }
259 
260 static inline int
261 bitmap_full(unsigned long *addr, int size)
262 {
263 	unsigned long mask;
264 	int tail;
265 	int len;
266 	int i;
267 
268 	len = size / BITS_PER_LONG;
269 	for (i = 0; i < len; i++)
270 		if (addr[i] != ~0UL)
271 			return (0);
272 	tail = size & (BITS_PER_LONG - 1);
273 	if (tail) {
274 		mask = BITMAP_LAST_WORD_MASK(tail);
275 		if ((addr[i] & mask) != mask)
276 			return (0);
277 	}
278 	return (1);
279 }
280 
281 static inline int
282 bitmap_empty(unsigned long *addr, int size)
283 {
284 	unsigned long mask;
285 	int tail;
286 	int len;
287 	int i;
288 
289 	len = size / BITS_PER_LONG;
290 	for (i = 0; i < len; i++)
291 		if (addr[i] != 0)
292 			return (0);
293 	tail = size & (BITS_PER_LONG - 1);
294 	if (tail) {
295 		mask = BITMAP_LAST_WORD_MASK(tail);
296 		if ((addr[i] & mask) != 0)
297 			return (0);
298 	}
299 	return (1);
300 }
301 
/*
 * Single-bit operations on an array of longs.  The double-underscore
 * forms are the non-atomic variants in Linux, but here both forms
 * expand to the same FreeBSD atomic(9) primitives.
 */
#define	__set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define	__clear_bit(i, a)						\
    atomic_clear_long(&((volatile long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[BIT_WORD(i)], BIT_MASK(i))

/* Non-zero when bit (i) is set; the word is read with an acquire load. */
#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[BIT_WORD(i)]) &	\
    BIT_MASK(i))
317 
318 static inline long
319 test_and_clear_bit(long bit, long *var)
320 {
321 	long val;
322 
323 	var += BIT_WORD(bit);
324 	bit %= BITS_PER_LONG;
325 	bit = (1UL << bit);
326 	do {
327 		val = *(volatile long *)var;
328 	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);
329 
330 	return !!(val & bit);
331 }
332 
333 static inline long
334 test_and_set_bit(long bit, long *var)
335 {
336 	long val;
337 
338 	var += BIT_WORD(bit);
339 	bit %= BITS_PER_LONG;
340 	bit = (1UL << bit);
341 	do {
342 		val = *(volatile long *)var;
343 	} while (atomic_cmpset_long(var, val, val | bit) == 0);
344 
345 	return !!(val & bit);
346 }
347 
348 static inline void
349 bitmap_set(unsigned long *map, int start, int nr)
350 {
351 	unsigned long *p = map + BIT_WORD(start);
352 	const int size = start + nr;
353 	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
354 	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);
355 
356 	while (nr - bits_to_set >= 0) {
357 		*p |= mask_to_set;
358 		nr -= bits_to_set;
359 		bits_to_set = BITS_PER_LONG;
360 		mask_to_set = ~0UL;
361 		p++;
362 	}
363 	if (nr) {
364 		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
365 		*p |= mask_to_set;
366 	}
367 }
368 
369 static inline void
370 bitmap_clear(unsigned long *map, int start, int nr)
371 {
372 	unsigned long *p = map + BIT_WORD(start);
373 	const int size = start + nr;
374 	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
375 	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
376 
377 	while (nr - bits_to_clear >= 0) {
378 		*p &= ~mask_to_clear;
379 		nr -= bits_to_clear;
380 		bits_to_clear = BITS_PER_LONG;
381 		mask_to_clear = ~0UL;
382 		p++;
383 	}
384 	if (nr) {
385 		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
386 		*p &= ~mask_to_clear;
387 	}
388 }
389 
390 enum {
391         REG_OP_ISFREE,
392         REG_OP_ALLOC,
393         REG_OP_RELEASE,
394 };
395 
396 static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
397 {
398         int nbits_reg;
399         int index;
400         int offset;
401         int nlongs_reg;
402         int nbitsinlong;
403         unsigned long mask;
404         int i;
405         int ret = 0;
406 
407         nbits_reg = 1 << order;
408         index = pos / BITS_PER_LONG;
409         offset = pos - (index * BITS_PER_LONG);
410         nlongs_reg = BITS_TO_LONGS(nbits_reg);
411         nbitsinlong = min(nbits_reg,  BITS_PER_LONG);
412 
413         mask = (1UL << (nbitsinlong - 1));
414         mask += mask - 1;
415         mask <<= offset;
416 
417         switch (reg_op) {
418         case REG_OP_ISFREE:
419                 for (i = 0; i < nlongs_reg; i++) {
420                         if (bitmap[index + i] & mask)
421                                 goto done;
422                 }
423                 ret = 1;
424                 break;
425 
426         case REG_OP_ALLOC:
427                 for (i = 0; i < nlongs_reg; i++)
428                         bitmap[index + i] |= mask;
429                 break;
430 
431         case REG_OP_RELEASE:
432                 for (i = 0; i < nlongs_reg; i++)
433                         bitmap[index + i] &= ~mask;
434                 break;
435         }
436 done:
437         return ret;
438 }
439 
440 static inline int
441 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
442 {
443         int pos;
444         int end;
445 
446         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
447                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
448                         continue;
449                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
450                 return pos;
451         }
452         return -ENOMEM;
453 }
454 
455 static inline int
456 bitmap_allocate_region(unsigned long *bitmap, int pos, int order)
457 {
458         if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
459                 return -EBUSY;
460         __reg_op(bitmap, pos, order, REG_OP_ALLOC);
461         return 0;
462 }
463 
464 static inline void
465 bitmap_release_region(unsigned long *bitmap, int pos, int order)
466 {
467         __reg_op(bitmap, pos, order, REG_OP_RELEASE);
468 }
469 
/*
 * Iterate "bit" over every set bit within the first "size" bits of
 * the bitmap at "addr".
 */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
474 
/* Count the set bits among the first "nbits" bits of the bitmap. */
static inline unsigned
bitmap_weight(unsigned long *bitmap, unsigned nbits)
{
	unsigned count = 0;
	unsigned b;

	for (b = find_first_bit(bitmap, nbits); b < nbits;
	    b = find_next_bit(bitmap, nbits, b + 1))
		count++;
	return (count);
}
485 
486 static inline int
487 bitmap_equal(const unsigned long *pa,
488     const unsigned long *pb, unsigned bits)
489 {
490 	unsigned x;
491 	unsigned y = bits / BITS_PER_LONG;
492 
493 	for (x = 0; x != y; x++) {
494 		if (pa[x] != pb[x])
495 			return (0);
496 	}
497 
498 	y = bits % BITS_PER_LONG;
499 	if (y != 0) {
500 		if ((pa[x] ^ pb[x]) & BITMAP_LAST_WORD_MASK(y))
501 			return (0);
502 	}
503 	return (1);
504 }
505 
506 #endif	/* _LINUX_BITOPS_H_ */
507