/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2017 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 * 29 * $FreeBSD$ 30 */ 31 #ifndef _LINUX_BITOPS_H_ 32 #define _LINUX_BITOPS_H_ 33 34 #include <sys/param.h> 35 #include <sys/types.h> 36 #include <sys/systm.h> 37 #include <sys/errno.h> 38 #include <sys/libkern.h> 39 40 #define BIT(nr) (1UL << (nr)) 41 #define BIT_ULL(nr) (1ULL << (nr)) 42 #ifdef __LP64__ 43 #define BITS_PER_LONG 64 44 #else 45 #define BITS_PER_LONG 32 46 #endif 47 48 #define BITS_PER_LONG_LONG 64 49 50 #define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG)) 51 #define BITMAP_LAST_WORD_MASK(n) (~0UL >> (BITS_PER_LONG - (n))) 52 #define BITS_TO_LONGS(n) howmany((n), BITS_PER_LONG) 53 #define BIT_MASK(nr) (1UL << ((nr) & (BITS_PER_LONG - 1))) 54 #define BIT_WORD(nr) ((nr) / BITS_PER_LONG) 55 #define GENMASK(h, l) (((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l))) 56 #define GENMASK_ULL(h, l) (((~0ULL) >> (BITS_PER_LONG_LONG - (h) - 1)) & ((~0ULL) << (l))) 57 #define BITS_PER_BYTE 8 58 #define BITS_PER_TYPE(t) (sizeof(t) * BITS_PER_BYTE) 59 60 #define hweight8(x) bitcount((uint8_t)(x)) 61 #define hweight16(x) bitcount16(x) 62 #define hweight32(x) bitcount32(x) 63 #define hweight64(x) bitcount64(x) 64 #define hweight_long(x) bitcountl(x) 65 66 static inline int 67 __ffs(int mask) 68 { 69 return (ffs(mask) - 1); 70 } 71 72 static inline int 73 __fls(int mask) 74 { 75 return (fls(mask) - 1); 76 } 77 78 static inline int 79 __ffsl(long mask) 80 { 81 return (ffsl(mask) - 1); 82 } 83 84 static inline int 85 __flsl(long mask) 86 { 87 return (flsl(mask) - 1); 88 } 89 90 static inline int 91 fls64(uint64_t mask) 92 { 93 return (flsll(mask)); 94 } 95 96 static inline uint32_t 97 ror32(uint32_t word, unsigned int shift) 98 { 99 return ((word >> shift) | (word << (32 - shift))); 100 } 101 102 #define ffz(mask) __ffs(~(mask)) 103 104 static inline int get_count_order(unsigned int count) 105 { 106 int order; 107 108 order = fls(count) - 1; 109 if (count & (count - 1)) 110 order++; 111 return order; 112 } 113 114 static inline unsigned 
long 115 find_first_bit(const unsigned long *addr, unsigned long size) 116 { 117 long mask; 118 int bit; 119 120 for (bit = 0; size >= BITS_PER_LONG; 121 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 122 if (*addr == 0) 123 continue; 124 return (bit + __ffsl(*addr)); 125 } 126 if (size) { 127 mask = (*addr) & BITMAP_LAST_WORD_MASK(size); 128 if (mask) 129 bit += __ffsl(mask); 130 else 131 bit += size; 132 } 133 return (bit); 134 } 135 136 static inline unsigned long 137 find_first_zero_bit(const unsigned long *addr, unsigned long size) 138 { 139 long mask; 140 int bit; 141 142 for (bit = 0; size >= BITS_PER_LONG; 143 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 144 if (~(*addr) == 0) 145 continue; 146 return (bit + __ffsl(~(*addr))); 147 } 148 if (size) { 149 mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size); 150 if (mask) 151 bit += __ffsl(mask); 152 else 153 bit += size; 154 } 155 return (bit); 156 } 157 158 static inline unsigned long 159 find_last_bit(const unsigned long *addr, unsigned long size) 160 { 161 long mask; 162 int offs; 163 int bit; 164 int pos; 165 166 pos = size / BITS_PER_LONG; 167 offs = size % BITS_PER_LONG; 168 bit = BITS_PER_LONG * pos; 169 addr += pos; 170 if (offs) { 171 mask = (*addr) & BITMAP_LAST_WORD_MASK(offs); 172 if (mask) 173 return (bit + __flsl(mask)); 174 } 175 while (pos--) { 176 addr--; 177 bit -= BITS_PER_LONG; 178 if (*addr) 179 return (bit + __flsl(*addr)); 180 } 181 return (size); 182 } 183 184 static inline unsigned long 185 find_next_bit(const unsigned long *addr, unsigned long size, unsigned long offset) 186 { 187 long mask; 188 int offs; 189 int bit; 190 int pos; 191 192 if (offset >= size) 193 return (size); 194 pos = offset / BITS_PER_LONG; 195 offs = offset % BITS_PER_LONG; 196 bit = BITS_PER_LONG * pos; 197 addr += pos; 198 if (offs) { 199 mask = (*addr) & ~BITMAP_LAST_WORD_MASK(offs); 200 if (mask) 201 return (bit + __ffsl(mask)); 202 if (size - bit <= BITS_PER_LONG) 203 return (size); 204 bit += 
BITS_PER_LONG; 205 addr++; 206 } 207 for (size -= bit; size >= BITS_PER_LONG; 208 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 209 if (*addr == 0) 210 continue; 211 return (bit + __ffsl(*addr)); 212 } 213 if (size) { 214 mask = (*addr) & BITMAP_LAST_WORD_MASK(size); 215 if (mask) 216 bit += __ffsl(mask); 217 else 218 bit += size; 219 } 220 return (bit); 221 } 222 223 static inline unsigned long 224 find_next_zero_bit(const unsigned long *addr, unsigned long size, 225 unsigned long offset) 226 { 227 long mask; 228 int offs; 229 int bit; 230 int pos; 231 232 if (offset >= size) 233 return (size); 234 pos = offset / BITS_PER_LONG; 235 offs = offset % BITS_PER_LONG; 236 bit = BITS_PER_LONG * pos; 237 addr += pos; 238 if (offs) { 239 mask = ~(*addr) & ~BITMAP_LAST_WORD_MASK(offs); 240 if (mask) 241 return (bit + __ffsl(mask)); 242 if (size - bit <= BITS_PER_LONG) 243 return (size); 244 bit += BITS_PER_LONG; 245 addr++; 246 } 247 for (size -= bit; size >= BITS_PER_LONG; 248 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 249 if (~(*addr) == 0) 250 continue; 251 return (bit + __ffsl(~(*addr))); 252 } 253 if (size) { 254 mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size); 255 if (mask) 256 bit += __ffsl(mask); 257 else 258 bit += size; 259 } 260 return (bit); 261 } 262 263 #define __set_bit(i, a) \ 264 atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i)) 265 266 #define set_bit(i, a) \ 267 atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i)) 268 269 #define __clear_bit(i, a) \ 270 atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i)) 271 272 #define clear_bit(i, a) \ 273 atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i)) 274 275 #define clear_bit_unlock(i, a) \ 276 atomic_clear_rel_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i)) 277 278 #define test_bit(i, a) \ 279 !!(READ_ONCE(((volatile const unsigned long *)(a))[BIT_WORD(i)]) & BIT_MASK(i)) 280 
/*
 * Atomically clear bit "bit" in the bit array "var" and return the
 * bit's previous value (non-zero if it was set).
 */
static inline int
test_and_clear_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += BIT_WORD(bit);
	bit %= BITS_PER_LONG;
	bit = (1UL << bit);

	/* Retry the compare-and-swap until no other CPU races us. */
	val = *var;
	while (!atomic_fcmpset_long(var, &val, val & ~bit))
		;
	return !!(val & bit);
}

/*
 * Non-atomic variant of test_and_clear_bit(); caller must provide
 * its own exclusion against concurrent updates of the same word.
 */
static inline int
__test_and_clear_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += BIT_WORD(bit);
	bit %= BITS_PER_LONG;
	bit = (1UL << bit);

	val = *var;
	*var &= ~bit;

	return !!(val & bit);
}

/*
 * Atomically set bit "bit" in the bit array "var" and return the
 * bit's previous value (non-zero if it was already set).
 */
static inline int
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += BIT_WORD(bit);
	bit %= BITS_PER_LONG;
	bit = (1UL << bit);

	/* Retry the compare-and-swap until no other CPU races us. */
	val = *var;
	while (!atomic_fcmpset_long(var, &val, val | bit))
		;
	return !!(val & bit);
}

/*
 * Non-atomic variant of test_and_set_bit(); caller must provide
 * its own exclusion against concurrent updates of the same word.
 */
static inline int
__test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += BIT_WORD(bit);
	bit %= BITS_PER_LONG;
	bit = (1UL << bit);

	val = *var;
	*var |= bit;

	return !!(val & bit);
}

/* Operations understood by linux_reg_op() below. */
enum {
	REG_OP_ISFREE,		/* check that the whole region is clear */
	REG_OP_ALLOC,		/* set every bit in the region */
	REG_OP_RELEASE,		/* clear every bit in the region */
};

/*
 * Query or modify an aligned region of (1 << order) bits in "bitmap",
 * starting at bit "pos" (pos is assumed to be region-aligned, so the
 * per-long "mask" lines up identically in every long it touches).
 *
 * Returns 1 for REG_OP_ISFREE when all bits in the region are clear;
 * 0 otherwise (including for the ALLOC/RELEASE operations, which fall
 * through to "done" with ret still 0).
 */
static inline int
linux_reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
{
	int nbits_reg;
	int index;
	int offset;
	int nlongs_reg;
	int nbitsinlong;
	unsigned long mask;
	int i;
	int ret = 0;

	nbits_reg = 1 << order;
	index = pos / BITS_PER_LONG;
	offset = pos - (index * BITS_PER_LONG);
	nlongs_reg = BITS_TO_LONGS(nbits_reg);
	nbitsinlong = MIN(nbits_reg, BITS_PER_LONG);

	/*
	 * Build a mask of "nbitsinlong" ones at "offset"; written this
	 * way to avoid shifting by BITS_PER_LONG when the region fills
	 * a whole long.
	 */
	mask = (1UL << (nbitsinlong - 1));
	mask += mask - 1;
	mask <<= offset;

	switch (reg_op) {
	case REG_OP_ISFREE:
		for (i = 0; i < nlongs_reg; i++) {
			if (bitmap[index + i] & mask)
				goto done;
		}
		ret = 1;	/* all bits in region are clear */
		break;

	case REG_OP_ALLOC:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] |= mask;
		break;

	case REG_OP_RELEASE:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] &= ~mask;
		break;
	}
done:
	return ret;
}

/* Iterate "bit" over every set (resp. clear) bit in a bitmap. */
#define	for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	    (bit) < (size);					\
	    (bit) = find_next_bit((addr), (size), (bit) + 1))

#define	for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	    (bit) < (size);					\
	    (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/*
 * Sign-extend a value whose sign bit is at position "index"
 * (0-based), returning the result as a 64-bit value.
 */
static inline uint64_t
sign_extend64(uint64_t value, int index)
{
	uint8_t shift = 63 - index;

	return ((int64_t)(value << shift) >> shift);
}

#endif	/* _LINUX_BITOPS_H_ */