/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Felix Fietkau <nbd@nbd.name>
 * Copyright (C) 2004 - 2009 Ivo van Doorn <IvDoorn@gmail.com>
 */

#ifndef _LINUX_BITFIELD_H
#define _LINUX_BITFIELD_H

#include <linux/build_bug.h>
#include <asm/byteorder.h>

/*
 * Bitfield access macros
 *
 * The FIELD_{GET,PREP} macros take a shifted mask as their first
 * parameter, from which they extract the base mask and the shift amount.
 * The mask must be a compile-time constant.
 *
 * Example:
 *
 *  #include <linux/bitfield.h>
 *  #include <linux/bits.h>
 *
 *  #define REG_FIELD_A  GENMASK(6, 0)
 *  #define REG_FIELD_B  BIT(7)
 *  #define REG_FIELD_C  GENMASK(15, 8)
 *  #define REG_FIELD_D  GENMASK(31, 16)
 *
 * Get:
 *  a = FIELD_GET(REG_FIELD_A, reg);
 *  b = FIELD_GET(REG_FIELD_B, reg);
 *
 * Set:
 *  reg = FIELD_PREP(REG_FIELD_A, 1) |
 *        FIELD_PREP(REG_FIELD_B, 0) |
 *        FIELD_PREP(REG_FIELD_C, c) |
 *        FIELD_PREP(REG_FIELD_D, 0x40);
 *
 * Modify:
 *  reg &= ~REG_FIELD_C;
 *  reg |= FIELD_PREP(REG_FIELD_C, c);
 */

#define __bf_shf(x) (__builtin_ffsll(x) - 1)

#define __scalar_type_to_unsigned_cases(type)                           \
                unsigned type:  (unsigned type)0,                       \
                signed type:    (unsigned type)0

#define __unsigned_scalar_typeof(x) typeof(                             \
                _Generic((x),                                           \
                        char:   (unsigned char)0,                       \
                        __scalar_type_to_unsigned_cases(char),          \
                        __scalar_type_to_unsigned_cases(short),         \
                        __scalar_type_to_unsigned_cases(int),           \
                        __scalar_type_to_unsigned_cases(long),          \
                        __scalar_type_to_unsigned_cases(long long),     \
                        default: (x)))

#define __bf_cast_unsigned(type, x)     ((__unsigned_scalar_typeof(type))(x))

#define __BF_FIELD_CHECK(_mask, _reg, _val, _pfx)                       \
        ({                                                              \
                BUILD_BUG_ON_MSG(!__builtin_constant_p(_mask),          \
                                 _pfx "mask is not constant");          \
                BUILD_BUG_ON_MSG((_mask) == 0, _pfx "mask is zero");    \
                BUILD_BUG_ON_MSG(__builtin_constant_p(_val) ?           \
                                 ~((_mask) >> __bf_shf(_mask)) & (_val) : 0, \
                                 _pfx "value too large for the field"); \
                BUILD_BUG_ON_MSG(__bf_cast_unsigned(_mask, _mask) >     \
                                 __bf_cast_unsigned(_reg, ~0ull),       \
                                 _pfx "type of reg too small for mask"); \
                __BUILD_BUG_ON_NOT_POWER_OF_2((_mask) +                 \
                                              (1ULL << __bf_shf(_mask))); \
        })

/**
 * FIELD_MAX() - produce the maximum value representable by a field
 * @_mask: shifted mask defining the field's length and position
 *
 * FIELD_MAX() returns the maximum value that can be held in the field
 * specified by @_mask.
 */
#define FIELD_MAX(_mask)                                                \
        ({                                                              \
                __BF_FIELD_CHECK(_mask, 0ULL, 0ULL, "FIELD_MAX: ");     \
                (typeof(_mask))((_mask) >> __bf_shf(_mask));            \
        })
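
/*
 * Minimal usage sketch (not part of the original header), reusing the
 * REG_FIELD_C layout from the example at the top of this file:
 *
 *  c_max = FIELD_MAX(REG_FIELD_C);     c_max is 0xff for GENMASK(15, 8)
 */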

/**
 * FIELD_FIT() - check if value fits in the field
 * @_mask: shifted mask defining the field's length and position
 * @_val:  value to test against the field
 *
 * Return: true if @_val can fit inside @_mask, false if @_val is too big.
 */
#define FIELD_FIT(_mask, _val)                                          \
        ({                                                              \
                __BF_FIELD_CHECK(_mask, 0ULL, 0ULL, "FIELD_FIT: ");     \
                !((((typeof(_mask))_val) << __bf_shf(_mask)) & ~(_mask)); \
        })
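
/*
 * Minimal usage sketch (not part of the original header), reusing the
 * REG_FIELD_A layout from the example at the top of this file:
 *
 *  FIELD_FIT(REG_FIELD_A, 0x7f)    is true:  0x7f fits in bits 6:0
 *  FIELD_FIT(REG_FIELD_A, 0x80)    is false: bit 7 lies outside the field
 */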

/**
 * FIELD_PREP() - prepare a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_val:  value to put in the field
 *
 * FIELD_PREP() masks and shifts up the value.  The result should
 * be combined with other fields of the bitfield using logical OR.
 */
#define FIELD_PREP(_mask, _val)                                         \
        ({                                                              \
                __BF_FIELD_CHECK(_mask, 0ULL, _val, "FIELD_PREP: ");    \
                ((typeof(_mask))(_val) << __bf_shf(_mask)) & (_mask);   \
        })

#define __BF_CHECK_POW2(n)      BUILD_BUG_ON_ZERO(((n) & ((n) - 1)) != 0)

/**
 * FIELD_PREP_CONST() - prepare a constant bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_val:  value to put in the field
 *
 * FIELD_PREP_CONST() masks and shifts up the value.  The result should
 * be combined with other fields of the bitfield using logical OR.
 *
 * Unlike FIELD_PREP() this is a constant expression and can therefore
 * be used in initializers. Error checking is more limited in this
 * version, and non-constant masks cannot be used.
 */
#define FIELD_PREP_CONST(_mask, _val)                                   \
        (                                                               \
                /* mask must be non-zero */                             \
                BUILD_BUG_ON_ZERO((_mask) == 0) +                       \
                /* check if value fits */                               \
                BUILD_BUG_ON_ZERO(~((_mask) >> __bf_shf(_mask)) & (_val)) + \
                /* check if mask is contiguous */                       \
                __BF_CHECK_POW2((_mask) + (1ULL << __bf_shf(_mask))) +  \
                /* and create the value */                              \
                (((typeof(_mask))(_val) << __bf_shf(_mask)) & (_mask))  \
        )
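
/*
 * Minimal usage sketch (not part of the original header), reusing the
 * REG_FIELD_C and REG_FIELD_D layout from the example at the top of this
 * file.  FIELD_PREP() cannot be used in a static initializer because it
 * expands to a statement expression; FIELD_PREP_CONST() can:
 *
 *  static const u32 reg_default = FIELD_PREP_CONST(REG_FIELD_C, 0x1f) |
 *                                 FIELD_PREP_CONST(REG_FIELD_D, 0x40);
 */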

/**
 * FIELD_GET() - extract a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_reg:  value of entire bitfield
 *
 * FIELD_GET() extracts the field specified by @_mask from the
 * bitfield passed in as @_reg by masking and shifting it down.
 */
#define FIELD_GET(_mask, _reg)                                          \
        ({                                                              \
                __BF_FIELD_CHECK(_mask, _reg, 0U, "FIELD_GET: ");       \
                (typeof(_mask))(((_reg) & (_mask)) >> __bf_shf(_mask)); \
        })

extern void __compiletime_error("value doesn't fit into mask")
__field_overflow(void);
extern void __compiletime_error("bad bitfield mask")
__bad_mask(void);
/* Reject masks that are not a single contiguous run of bits, then return
 * the mask's lowest set bit, i.e. the multiplier that moves a field value
 * into position.
 */
static __always_inline u64 field_multiplier(u64 field)
{
        if ((field | (field - 1)) & ((field | (field - 1)) + 1))
                __bad_mask();
        return field & -field;
}
/* Shift the mask down so the field starts at bit 0. */
static __always_inline u64 field_mask(u64 field)
{
        return field / field_multiplier(field);
}
#define field_max(field)        ((typeof(field))field_mask(field))
/* Generate the *_encode_bits(), *_replace_bits(), *p_replace_bits() and
 * *_get_bits() helpers for one storage type.
 */
#define ____MAKE_OP(type,base,to,from)                                  \
static __always_inline __##type type##_encode_bits(base v, base field)  \
{                                                                       \
        if (__builtin_constant_p(v) && (v & ~field_mask(field)))        \
                __field_overflow();                                     \
        return to((v & field_mask(field)) * field_multiplier(field));   \
}                                                                       \
static __always_inline __##type type##_replace_bits(__##type old,       \
                                        base val, base field)           \
{                                                                       \
        return (old & ~to(field)) | type##_encode_bits(val, field);     \
}                                                                       \
static __always_inline void type##p_replace_bits(__##type *p,           \
                                        base val, base field)           \
{                                                                       \
        *p = (*p & ~to(field)) | type##_encode_bits(val, field);        \
}                                                                       \
static __always_inline base type##_get_bits(__##type v, base field)     \
{                                                                       \
        return (from(v) & field)/field_multiplier(field);               \
}
#define __MAKE_OP(size)                                                 \
        ____MAKE_OP(le##size,u##size,cpu_to_le##size,le##size##_to_cpu) \
        ____MAKE_OP(be##size,u##size,cpu_to_be##size,be##size##_to_cpu) \
        ____MAKE_OP(u##size,u##size,,)
____MAKE_OP(u8,u8,,)
__MAKE_OP(16)
__MAKE_OP(32)
__MAKE_OP(64)
#undef __MAKE_OP
#undef ____MAKE_OP
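
/*
 * The generator macros above expand to typed helpers such as:
 *
 *  u32 u32_get_bits(u32 v, u32 field);
 *  __le32 le32_encode_bits(u32 v, u32 field);
 *  __le32 le32_replace_bits(__le32 old, u32 val, u32 field);
 *  void le32p_replace_bits(__le32 *p, u32 val, u32 field);
 *
 * plus the u8/u16/u64 and big-endian variants.  They take the same kind of
 * shifted mask as the FIELD_*() macros, but operate on fixed-width (and,
 * for the le/be variants, endian-annotated) types and do the byte swapping
 * internally.
 *
 * Minimal usage sketch (illustrative only; the descriptor layout below is
 * hypothetical, not part of this header):
 *
 *  #define DESC_LEN  GENMASK(13, 0)
 *  #define DESC_OWN  BIT(31)
 *
 *  len = le32_get_bits(desc->ctrl, DESC_LEN);
 *  desc->ctrl = le32_encode_bits(len, DESC_LEN) |
 *               le32_encode_bits(1, DESC_OWN);
 */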

#endif