/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Felix Fietkau <nbd@nbd.name>
 * Copyright (C) 2004 - 2009 Ivo van Doorn <IvDoorn@gmail.com>
 */

#ifndef _LINUX_BITFIELD_H
#define _LINUX_BITFIELD_H

#include <linux/build_bug.h>
#include <linux/typecheck.h>
#include <asm/byteorder.h>

/*
 * Bitfield access macros
 *
 * The FIELD_{GET,PREP} macros take a shifted mask as their first parameter
 * and derive the base mask and shift amount from it.
 * The mask must be a compile-time constant.
 * field_{get,prep} are variants that accept a non-constant mask.
 *
 * Example:
 *
 *  #include <linux/bitfield.h>
 *  #include <linux/bits.h>
 *
 *  #define REG_FIELD_A  GENMASK(6, 0)
 *  #define REG_FIELD_B  BIT(7)
 *  #define REG_FIELD_C  GENMASK(15, 8)
 *  #define REG_FIELD_D  GENMASK(31, 16)
 *
 * Get:
 *  a = FIELD_GET(REG_FIELD_A, reg);
 *  b = FIELD_GET(REG_FIELD_B, reg);
 *
 * Set:
 *  reg = FIELD_PREP(REG_FIELD_A, 1) |
 *	  FIELD_PREP(REG_FIELD_B, 0) |
 *	  FIELD_PREP(REG_FIELD_C, c) |
 *	  FIELD_PREP(REG_FIELD_D, 0x40);
 *
 * Modify:
 *  FIELD_MODIFY(REG_FIELD_C, &reg, c);
 */

#define __bf_shf(x) (__builtin_ffsll(x) - 1)
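
/*
 * __bf_shf() evaluates to the position of the field's lowest set bit;
 * for example, __bf_shf(GENMASK(15, 8)) is 8 and __bf_shf(BIT(7)) is 7.
 */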

#define __scalar_type_to_unsigned_cases(type)				\
		unsigned type:	(unsigned type)0,			\
		signed type:	(unsigned type)0

#define __unsigned_scalar_typeof(x) typeof(				\
		_Generic((x),						\
			char:	(unsigned char)0,			\
			__scalar_type_to_unsigned_cases(char),		\
			__scalar_type_to_unsigned_cases(short),		\
			__scalar_type_to_unsigned_cases(int),		\
			__scalar_type_to_unsigned_cases(long),		\
			__scalar_type_to_unsigned_cases(long long),	\
			default: (x)))

#define __bf_cast_unsigned(type, x)	((__unsigned_scalar_typeof(type))(x))

#define __BF_FIELD_CHECK_MASK(_mask, _val, _pfx)			\
	({								\
		BUILD_BUG_ON_MSG(!__builtin_constant_p(_mask),		\
				 _pfx "mask is not constant");		\
		BUILD_BUG_ON_MSG((_mask) == 0, _pfx "mask is zero");	\
		BUILD_BUG_ON_MSG(__builtin_constant_p(_val) ?		\
				 ~((_mask) >> __bf_shf(_mask)) &	\
					(0 + (_val)) : 0,		\
				 _pfx "value too large for the field"); \
		__BUILD_BUG_ON_NOT_POWER_OF_2((_mask) +			\
					      (1ULL << __bf_shf(_mask))); \
	})

#define __BF_FIELD_CHECK_REG(mask, reg, pfx)				\
	BUILD_BUG_ON_MSG(__bf_cast_unsigned(mask, mask) >		\
			 __bf_cast_unsigned(reg, ~0ull),		\
			 pfx "type of reg too small for mask")

#define __BF_FIELD_CHECK(mask, reg, val, pfx)				\
	({								\
		__BF_FIELD_CHECK_MASK(mask, val, pfx);			\
		__BF_FIELD_CHECK_REG(mask, reg, pfx);			\
	})

#define __FIELD_PREP(mask, val, pfx)					\
	({								\
		__BF_FIELD_CHECK_MASK(mask, val, pfx);			\
		((typeof(mask))(val) << __bf_shf(mask)) & (mask);	\
	})

#define __FIELD_GET(mask, reg, pfx)					\
	({								\
		__BF_FIELD_CHECK_MASK(mask, 0U, pfx);			\
		(typeof(mask))(((reg) & (mask)) >> __bf_shf(mask));	\
	})

/**
 * FIELD_MAX() - produce the maximum value representable by a field
 * @_mask: shifted mask defining the field's length and position
 *
 * FIELD_MAX() returns the maximum value that can be held in the field
 * specified by @_mask.
 */
#define FIELD_MAX(_mask)						\
	({								\
		__BF_FIELD_CHECK(_mask, 0ULL, 0ULL, "FIELD_MAX: ");	\
		(typeof(_mask))((_mask) >> __bf_shf(_mask));		\
	})

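/*
 * Example, reusing the REG_FIELD_C layout from the header comment above
 * (FIELD_MAX(GENMASK(15, 8)) evaluates to 0xff):
 *
 *	if (c > FIELD_MAX(REG_FIELD_C))
 *		return -EINVAL;
 */
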
/**
 * FIELD_FIT() - check if value fits in the field
 * @_mask: shifted mask defining the field's length and position
 * @_val:  value to test against the field
 *
 * Return: true if @_val can fit inside @_mask, false if @_val is too big.
 */
#define FIELD_FIT(_mask, _val)						\
	({								\
		__BF_FIELD_CHECK(_mask, 0ULL, 0ULL, "FIELD_FIT: ");	\
		!((((typeof(_mask))_val) << __bf_shf(_mask)) & ~(_mask)); \
	})

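/*
 * Example, again with REG_FIELD_C == GENMASK(15, 8):
 *
 *	FIELD_FIT(REG_FIELD_C, 0xff);	-> true
 *	FIELD_FIT(REG_FIELD_C, 0x100);	-> false (the value needs 9 bits)
 */
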
/**
 * FIELD_PREP() - prepare a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_val:  value to put in the field
 *
 * FIELD_PREP() masks and shifts up the value.  The result should
 * be combined with other fields of the bitfield using bitwise OR.
 */
#define FIELD_PREP(_mask, _val)						\
	({								\
		__BF_FIELD_CHECK_REG(_mask, 0ULL, "FIELD_PREP: ");	\
		__FIELD_PREP(_mask, _val, "FIELD_PREP: ");		\
	})

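/*
 * For example, FIELD_PREP(GENMASK(15, 8), 0xab) evaluates to 0xab00, ready
 * to be OR-ed with the other fields of the register.
 */
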
#define __BF_CHECK_POW2(n)	BUILD_BUG_ON_ZERO(((n) & ((n) - 1)) != 0)

/**
 * FIELD_PREP_CONST() - prepare a constant bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_val:  value to put in the field
 *
 * FIELD_PREP_CONST() masks and shifts up the value.  The result should
 * be combined with other fields of the bitfield using bitwise OR.
 *
 * Unlike FIELD_PREP() this is a constant expression and can therefore
 * be used in initializers. Error reporting is less descriptive for this
 * version, and non-constant masks cannot be used.
 */
#define FIELD_PREP_CONST(_mask, _val)					\
	(								\
		/* mask must be non-zero */				\
		BUILD_BUG_ON_ZERO((_mask) == 0) +			\
		/* check if value fits */				\
		BUILD_BUG_ON_ZERO(~((_mask) >> __bf_shf(_mask)) & (_val)) + \
		/* check if mask is contiguous */			\
		__BF_CHECK_POW2((_mask) + (1ULL << __bf_shf(_mask))) +	\
		/* and create the value */				\
		(((typeof(_mask))(_val) << __bf_shf(_mask)) & (_mask))	\
	)

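/*
 * Example: because FIELD_PREP_CONST() is a constant expression, it can be
 * used in a static initializer, e.g.
 *
 *	static const u32 reg_default =
 *		FIELD_PREP_CONST(GENMASK(15, 8), 0x2a) |
 *		FIELD_PREP_CONST(BIT(7), 1);
 */
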
/**
 * FIELD_GET() - extract a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_reg:  value of entire bitfield
 *
 * FIELD_GET() extracts the field specified by @_mask from the
 * bitfield passed in as @_reg by masking and shifting it down.
 */
#define FIELD_GET(_mask, _reg)						\
	({								\
		__BF_FIELD_CHECK_REG(_mask, _reg, "FIELD_GET: ");	\
		__FIELD_GET(_mask, _reg, "FIELD_GET: ");		\
	})

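/*
 * For example, FIELD_GET(GENMASK(15, 8), 0x12ab34) evaluates to 0xab, i.e.
 * the byte occupying bits 15..8 of the register value.
 */
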
/**
 * FIELD_MODIFY() - modify a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_reg_p: pointer to the memory that should be updated
 * @_val: value to store in the bitfield
 *
 * FIELD_MODIFY() modifies the set of bits in @_reg_p specified by @_mask,
 * by replacing them with the bitfield value passed in as @_val.
 */
#define FIELD_MODIFY(_mask, _reg_p, _val)						\
	({										\
		typecheck_pointer(_reg_p);						\
		__BF_FIELD_CHECK(_mask, *(_reg_p), _val, "FIELD_MODIFY: ");		\
		*(_reg_p) &= ~(_mask);							\
		*(_reg_p) |= (((typeof(_mask))(_val) << __bf_shf(_mask)) & (_mask));	\
	})

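/*
 * Example: with u32 reg = 0x12ab34, FIELD_MODIFY(GENMASK(15, 8), &reg, 0xcd)
 * leaves reg == 0x12cd34; only the bits covered by the mask change.
 */
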
extern void __compiletime_error("value doesn't fit into mask")
__field_overflow(void);
extern void __compiletime_error("bad bitfield mask")
__bad_mask(void);

/*
 * field_multiplier() returns the lowest set bit of @field, i.e. the amount
 * by which an in-field value must be multiplied to shift it into place,
 * and triggers a compile-time error if @field is not a contiguous bitmask.
 */
static __always_inline u64 field_multiplier(u64 field)
{
	if ((field | (field - 1)) & ((field | (field - 1)) + 1))
		__bad_mask();
	return field & -field;
}

/* field_mask() returns @field shifted down so that it starts at bit 0. */
static __always_inline u64 field_mask(u64 field)
{
	return field / field_multiplier(field);
}
#define field_max(field)	((typeof(field))field_mask(field))

/*
 * ____MAKE_OP() generates typed helpers such as <type>_encode_bits(),
 * <type>_get_bits(), <type>_replace_bits() and <type>p_replace_bits() for
 * fixed-endian and CPU-endian field accesses.
 */
#define ____MAKE_OP(type,base,to,from)					\
static __always_inline __##type __must_check type##_encode_bits(base v, base field)	\
{									\
	if (__builtin_constant_p(v) && (v & ~field_mask(field)))	\
		__field_overflow();					\
	return to((v & field_mask(field)) * field_multiplier(field));	\
}									\
static __always_inline __##type __must_check type##_replace_bits(__##type old,	\
							base val, base field)	\
{									\
	return (old & ~to(field)) | type##_encode_bits(val, field);	\
}									\
static __always_inline void type##p_replace_bits(__##type *p,		\
					base val, base field)		\
{									\
	*p = (*p & ~to(field)) | type##_encode_bits(val, field);	\
}									\
static __always_inline base __must_check type##_get_bits(__##type v, base field)	\
{									\
	return (from(v) & field)/field_multiplier(field);		\
}
#define __MAKE_OP(size)							\
	____MAKE_OP(le##size,u##size,cpu_to_le##size,le##size##_to_cpu)	\
	____MAKE_OP(be##size,u##size,cpu_to_be##size,be##size##_to_cpu)	\
	____MAKE_OP(u##size,u##size,,)
____MAKE_OP(u8,u8,,)
__MAKE_OP(16)
__MAKE_OP(32)
__MAKE_OP(64)
#undef __MAKE_OP
#undef ____MAKE_OP

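/*
 * Example usage of the generated helpers (a sketch; desc->flags is a
 * hypothetical __le32 struct member, not part of this header):
 *
 *	u32 val = le32_get_bits(desc->flags, GENMASK(15, 8));
 *
 *	desc->flags = le32_replace_bits(desc->flags, 0xab, GENMASK(15, 8));
 *
 * The helpers perform the endian conversion internally.
 */
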
#define __field_prep(mask, val)						\
	({								\
		__auto_type __mask = (mask);				\
		typeof(__mask) __val = (val);				\
		unsigned int __shift = BITS_PER_TYPE(__mask) <= 32 ?	\
				       __ffs(__mask) : __ffs64(__mask);	\
		(__val << __shift) & __mask;				\
	})

#define __field_get(mask, reg)						\
	({								\
		__auto_type __mask = (mask);				\
		typeof(__mask) __reg = (reg);				\
		unsigned int __shift = BITS_PER_TYPE(__mask) <= 32 ?	\
				       __ffs(__mask) : __ffs64(__mask);	\
		(__reg & __mask) >> __shift;				\
	})

/**
 * field_prep() - prepare a bitfield element
 * @mask: shifted mask defining the field's length and position, must be
 *        non-zero
 * @val:  value to put in the field
 *
 * Return: field value masked and shifted to its final destination
 *
 * field_prep() masks and shifts up the value.  The result should be
 * combined with other fields of the bitfield using bitwise OR.
 * Unlike FIELD_PREP(), @mask is not limited to a compile-time constant.
 * Typical uses are masks stored in a table or computed by shifting a
 * constant by a variable number of bits.
 * If you want to ensure that @mask is a compile-time constant, please use
 * FIELD_PREP() directly instead.
 */
#define field_prep(mask, val)						\
	(__builtin_constant_p(mask) ? __FIELD_PREP(mask, val, "field_prep: ") \
				    : __field_prep(mask, val))

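/*
 * Example with a runtime mask (a sketch; reg_fields[] and chip->rev are
 * hypothetical driver-specific data):
 *
 *	u32 mask = reg_fields[chip->rev].ctrl_mask;
 *	u32 reg = field_prep(mask, val);
 */
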
/**
 * field_get() - extract a bitfield element
 * @mask: shifted mask defining the field's length and position, must be
 *        non-zero
 * @reg:  value of entire bitfield
 *
 * Return: extracted field value
 *
 * field_get() extracts the field specified by @mask from the
 * bitfield passed in as @reg by masking and shifting it down.
 * Unlike FIELD_GET(), @mask is not limited to a compile-time constant.
 * Typical uses are masks stored in a table or computed by shifting a
 * constant by a variable number of bits.
 * If you want to ensure that @mask is a compile-time constant, please use
 * FIELD_GET() directly instead.
 */
#define field_get(mask, reg)						\
	(__builtin_constant_p(mask) ? __FIELD_GET(mask, reg, "field_get: ") \
				    : __field_get(mask, reg))
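
/*
 * Example with a runtime mask (a sketch; ctrl_mask, base and CTRL_REG are
 * hypothetical driver-specific names):
 *
 *	u32 speed = field_get(ctrl_mask, readl(base + CTRL_REG));
 */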

#endif