/* SPDX-License-Identifier: LGPL-2.1-or-later */
#pragma once

#ifndef SD_BOOT
#include <assert.h>
#endif

#include <limits.h>
#include "types-fundamental.h"

#define _align_(x) __attribute__((__aligned__(x)))
#define _const_ __attribute__((__const__))
#define _pure_ __attribute__((__pure__))
#define _section_(x) __attribute__((__section__(x)))
#define _packed_ __attribute__((__packed__))
#define _retain_ __attribute__((__retain__))
#define _used_ __attribute__((__used__))
#define _unused_ __attribute__((__unused__))
#define _cleanup_(x) __attribute__((__cleanup__(x)))
#define _likely_(x) (__builtin_expect(!!(x), 1))
#define _unlikely_(x) (__builtin_expect(!!(x), 0))
#if __GNUC__ >= 7
#define _fallthrough_ __attribute__((__fallthrough__))
#else
#define _fallthrough_
#endif
/* Define C11 noreturn without <stdnoreturn.h> and even on older gcc
 * compiler versions */
#ifndef _noreturn_
#if __STDC_VERSION__ >= 201112L
#define _noreturn_ _Noreturn
#else
#define _noreturn_ __attribute__((__noreturn__))
#endif
#endif

#define XSTRINGIFY(x) #x
#define STRINGIFY(x) XSTRINGIFY(x)

#ifndef __COVERITY__
# define VOID_0 ((void)0)
#else
# define VOID_0 ((void*)0)
#endif

#define ELEMENTSOF(x) \
        (__builtin_choose_expr( \
                !__builtin_types_compatible_p(typeof(x), typeof(&*(x))), \
                sizeof(x)/sizeof((x)[0]), \
                VOID_0))
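/* For example, given "int a[12];" ELEMENTSOF(a) evaluates to 12 at compile time; applied to a plain
 * pointer it evaluates to the non-arithmetic VOID_0 instead, so accidental use on pointers fails to
 * compile. */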

#define XCONCATENATE(x, y) x ## y
#define CONCATENATE(x, y) XCONCATENATE(x, y)

#ifdef SD_BOOT
_noreturn_ void efi_assert(const char *expr, const char *file, unsigned line, const char *function);

#ifdef NDEBUG
#define assert(expr)
#define assert_not_reached() __builtin_unreachable()
#else
#define assert(expr) ({ _likely_(expr) ? VOID_0 : efi_assert(#expr, __FILE__, __LINE__, __PRETTY_FUNCTION__); })
#define assert_not_reached() efi_assert("Code should not be reached", __FILE__, __LINE__, __PRETTY_FUNCTION__)
#endif
#define static_assert _Static_assert
#define assert_se(expr) ({ _likely_(expr) ? VOID_0 : efi_assert(#expr, __FILE__, __LINE__, __PRETTY_FUNCTION__); })

#define memcpy(a, b, c) CopyMem((a), (b), (c))
#define free(a) FreePool(a)
#endif

/* This passes the argument through after (if asserts are enabled) checking that it is not null. */
#define ASSERT_PTR(expr) \
        ({ \
                typeof(expr) _expr_ = (expr); \
                assert(_expr_); \
                _expr_; \
        })

#define ASSERT_SE_PTR(expr) \
        ({ \
                typeof(expr) _expr_ = (expr); \
                assert_se(_expr_); \
                _expr_; \
        })
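/* Typical use (illustrative): "Foo *f = ASSERT_PTR(arg);" checks arg for NULL when asserts are enabled
 * and passes it through unchanged either way, so the check and the assignment fit on one line. */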

#define assert_cc(expr) static_assert(expr, #expr)

#define UNIQ_T(x, uniq) CONCATENATE(__unique_prefix_, CONCATENATE(x, uniq))
#define UNIQ __COUNTER__

/* Note that this works differently from pthread_once(): this macro does not synchronize code
 * execution, i.e. code conditionalized on this macro may run concurrently with all other code
 * conditionalized the same way; no ordering or completion is enforced. */
#define ONCE __ONCE(UNIQ_T(_once_, UNIQ))
#define __ONCE(o) \
        ({ \
                static sd_bool (o) = sd_false; \
                __sync_bool_compare_and_swap(&(o), sd_false, sd_true); \
        })
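/* For example, "if (ONCE) do_initial_log();" (do_initial_log() being whatever should happen only once)
 * runs its branch at most once per process and use site, but other threads are not made to wait for
 * that branch to complete. */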

#undef MAX
#define MAX(a, b) __MAX(UNIQ, (a), UNIQ, (b))
#define __MAX(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) > UNIQ_T(B, bq) ? UNIQ_T(A, aq) : UNIQ_T(B, bq); \
        })

#define IS_UNSIGNED_INTEGER_TYPE(type) \
        (__builtin_types_compatible_p(typeof(type), unsigned char) || \
         __builtin_types_compatible_p(typeof(type), unsigned short) || \
         __builtin_types_compatible_p(typeof(type), unsigned) || \
         __builtin_types_compatible_p(typeof(type), unsigned long) || \
         __builtin_types_compatible_p(typeof(type), unsigned long long))

#define IS_SIGNED_INTEGER_TYPE(type) \
        (__builtin_types_compatible_p(typeof(type), signed char) || \
         __builtin_types_compatible_p(typeof(type), signed short) || \
         __builtin_types_compatible_p(typeof(type), signed) || \
         __builtin_types_compatible_p(typeof(type), signed long) || \
         __builtin_types_compatible_p(typeof(type), signed long long))

/* Evaluates to (void) if _A or _B are not constant or of different types (being integers of different
 * sizes is also OK as long as the signedness matches) */
#define CONST_MAX(_A, _B) \
        (__builtin_choose_expr( \
                __builtin_constant_p(_A) && \
                __builtin_constant_p(_B) && \
                (__builtin_types_compatible_p(typeof(_A), typeof(_B)) || \
                 (IS_UNSIGNED_INTEGER_TYPE(_A) && IS_UNSIGNED_INTEGER_TYPE(_B)) || \
                 (IS_SIGNED_INTEGER_TYPE(_A) && IS_SIGNED_INTEGER_TYPE(_B))), \
                ((_A) > (_B)) ? (_A) : (_B), \
                VOID_0))
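/* For example, CONST_MAX(sizeof(int), sizeof(long)) can be used where a constant expression is
 * required, e.g. to size an array, whereas MAX() expands to a statement expression and cannot. */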

/* takes two types and returns the size of the larger one */
#define MAXSIZE(A, B) (sizeof(union _packed_ { typeof(A) a; typeof(B) b; }))

#define MAX3(x, y, z) \
        ({ \
                const typeof(x) _c = MAX(x, y); \
                MAX(_c, z); \
        })

#define MAX4(x, y, z, a) \
        ({ \
                const typeof(x) _d = MAX3(x, y, z); \
                MAX(_d, a); \
        })

#undef MIN
#define MIN(a, b) __MIN(UNIQ, (a), UNIQ, (b))
#define __MIN(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) < UNIQ_T(B, bq) ? UNIQ_T(A, aq) : UNIQ_T(B, bq); \
        })

/* evaluates to (void) if _A or _B are not constant or of different types */
#define CONST_MIN(_A, _B) \
        (__builtin_choose_expr( \
                __builtin_constant_p(_A) && \
                __builtin_constant_p(_B) && \
                __builtin_types_compatible_p(typeof(_A), typeof(_B)), \
                ((_A) < (_B)) ? (_A) : (_B), \
                VOID_0))

#define MIN3(x, y, z) \
        ({ \
                const typeof(x) _c = MIN(x, y); \
                MIN(_c, z); \
        })

#define LESS_BY(a, b) __LESS_BY(UNIQ, (a), UNIQ, (b))
#define __LESS_BY(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) > UNIQ_T(B, bq) ? UNIQ_T(A, aq) - UNIQ_T(B, bq) : 0; \
        })
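/* LESS_BY() is a saturating subtraction: e.g. LESS_BY(7U, 3U) is 4, while LESS_BY(3U, 7U) is 0 instead
 * of wrapping around. */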

#define CMP(a, b) __CMP(UNIQ, (a), UNIQ, (b))
#define __CMP(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) < UNIQ_T(B, bq) ? -1 : \
                UNIQ_T(A, aq) > UNIQ_T(B, bq) ? 1 : 0; \
        })

#undef CLAMP
#define CLAMP(x, low, high) __CLAMP(UNIQ, (x), UNIQ, (low), UNIQ, (high))
#define __CLAMP(xq, x, lowq, low, highq, high) \
        ({ \
                const typeof(x) UNIQ_T(X, xq) = (x); \
                const typeof(low) UNIQ_T(LOW, lowq) = (low); \
                const typeof(high) UNIQ_T(HIGH, highq) = (high); \
                UNIQ_T(X, xq) > UNIQ_T(HIGH, highq) ? \
                        UNIQ_T(HIGH, highq) : \
                        UNIQ_T(X, xq) < UNIQ_T(LOW, lowq) ? \
                                UNIQ_T(LOW, lowq) : \
                                UNIQ_T(X, xq); \
        })
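/* For example, CLAMP(15, 0, 10) evaluates to 10 and CLAMP(-3, 0, 10) to 0; a value already inside the
 * range is returned unchanged, and each argument is evaluated exactly once. */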

/* [(x + y - 1) / y] can overflow even when the rounded-up result would fit in the given type.
 * Therefore, we use [x / y + !!(x % y)] instead. Note that on "Real CPUs" a division returns both the
 * quotient and the remainder, so both forms should be equally fast. */
#define DIV_ROUND_UP(x, y) __DIV_ROUND_UP(UNIQ, (x), UNIQ, (y))
#define __DIV_ROUND_UP(xq, x, yq, y) \
        ({ \
                const typeof(x) UNIQ_T(X, xq) = (x); \
                const typeof(y) UNIQ_T(Y, yq) = (y); \
                (UNIQ_T(X, xq) / UNIQ_T(Y, yq) + !!(UNIQ_T(X, xq) % UNIQ_T(Y, yq))); \
        })
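/* For example, DIV_ROUND_UP(10, 4) is 3, and DIV_ROUND_UP(UINT64_MAX, 4096) still yields the correct
 * result, whereas the naive (x + y - 1) / y form would wrap around for such a large x. */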

#define CASE_F(X) case X:
#define CASE_F_1(CASE, X) CASE_F(X)
#define CASE_F_2(CASE, X, ...) CASE(X) CASE_F_1(CASE, __VA_ARGS__)
#define CASE_F_3(CASE, X, ...) CASE(X) CASE_F_2(CASE, __VA_ARGS__)
#define CASE_F_4(CASE, X, ...) CASE(X) CASE_F_3(CASE, __VA_ARGS__)
#define CASE_F_5(CASE, X, ...) CASE(X) CASE_F_4(CASE, __VA_ARGS__)
#define CASE_F_6(CASE, X, ...) CASE(X) CASE_F_5(CASE, __VA_ARGS__)
#define CASE_F_7(CASE, X, ...) CASE(X) CASE_F_6(CASE, __VA_ARGS__)
#define CASE_F_8(CASE, X, ...) CASE(X) CASE_F_7(CASE, __VA_ARGS__)
#define CASE_F_9(CASE, X, ...) CASE(X) CASE_F_8(CASE, __VA_ARGS__)
#define CASE_F_10(CASE, X, ...) CASE(X) CASE_F_9(CASE, __VA_ARGS__)
#define CASE_F_11(CASE, X, ...) CASE(X) CASE_F_10(CASE, __VA_ARGS__)
#define CASE_F_12(CASE, X, ...) CASE(X) CASE_F_11(CASE, __VA_ARGS__)
#define CASE_F_13(CASE, X, ...) CASE(X) CASE_F_12(CASE, __VA_ARGS__)
#define CASE_F_14(CASE, X, ...) CASE(X) CASE_F_13(CASE, __VA_ARGS__)
#define CASE_F_15(CASE, X, ...) CASE(X) CASE_F_14(CASE, __VA_ARGS__)
#define CASE_F_16(CASE, X, ...) CASE(X) CASE_F_15(CASE, __VA_ARGS__)
#define CASE_F_17(CASE, X, ...) CASE(X) CASE_F_16(CASE, __VA_ARGS__)
#define CASE_F_18(CASE, X, ...) CASE(X) CASE_F_17(CASE, __VA_ARGS__)
#define CASE_F_19(CASE, X, ...) CASE(X) CASE_F_18(CASE, __VA_ARGS__)
#define CASE_F_20(CASE, X, ...) CASE(X) CASE_F_19(CASE, __VA_ARGS__)

#define GET_CASE_F(_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14,_15,_16,_17,_18,_19,_20,NAME,...) NAME
#define FOR_EACH_MAKE_CASE(...) \
        GET_CASE_F(__VA_ARGS__,CASE_F_20,CASE_F_19,CASE_F_18,CASE_F_17,CASE_F_16,CASE_F_15,CASE_F_14,CASE_F_13,CASE_F_12,CASE_F_11, \
                   CASE_F_10,CASE_F_9,CASE_F_8,CASE_F_7,CASE_F_6,CASE_F_5,CASE_F_4,CASE_F_3,CASE_F_2,CASE_F_1) \
        (CASE_F,__VA_ARGS__)

#define IN_SET(x, ...) \
        ({ \
                sd_bool _found = sd_false; \
                /* If the build breaks in the line below, you need to extend the case macros. (We use "long double" as \
                 * type for the array, in the hope that checkers such as ubsan don't complain that the initializers for \
                 * the array are not representable by the base type. Ideally we'd use typeof(x) as base type, but that \
                 * doesn't work, as we want to use this on bitfields and gcc refuses typeof() on bitfields.) */ \
                static const long double __assert_in_set[] _unused_ = { __VA_ARGS__ }; \
                assert_cc(ELEMENTSOF(__assert_in_set) <= 20); \
                switch (x) { \
                FOR_EACH_MAKE_CASE(__VA_ARGS__) \
                        _found = sd_true; \
                        break; \
                default: \
                        break; \
                } \
                _found; \
        })
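/* For example, "if (IN_SET(r, -EAGAIN, -EINTR))" expands to a single switch over r; every listed value
 * must be a compile-time constant, and at most 20 values are supported by the case macros above. */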

/* Takes inspiration from Rust's Option::take() method: reads and returns a pointer, but at the same time
 * resets it to NULL. See: https://doc.rust-lang.org/std/option/enum.Option.html#method.take */
#define TAKE_PTR(ptr) \
        ({ \
                typeof(ptr) *_pptr_ = &(ptr); \
                typeof(ptr) _ptr_ = *_pptr_; \
                *_pptr_ = NULL; \
                _ptr_; \
        })
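/* Typical use (illustrative): with "_cleanup_(freep) char *s = ...;", writing "return TAKE_PTR(s);"
 * hands ownership of the allocation to the caller and resets s to NULL, so the cleanup handler will
 * not free it on scope exit. */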

/*
 * STRLEN - return the length of a string literal, minus the trailing NUL byte.
 * Contrary to strlen(), this is a constant expression.
 * @x: a string literal.
 */
#define STRLEN(x) (sizeof(""x"") - sizeof(typeof(x[0])))
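/* For example, STRLEN("foo") is 3 while sizeof("foo") is 4, and being a constant expression it can
 * size an array, e.g. char buf[STRLEN("abc") + 1]; passing anything but a string literal fails to
 * compile because of the ""x"" concatenation. */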

#define mfree(memory) \
        ({ \
                free(memory); \
                (typeof(memory)) NULL; \
        })
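/* For example, "p = mfree(p);" frees p and resets it to NULL in a single expression, which helps avoid
 * dangling pointers and double frees. */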

static inline size_t ALIGN_TO(size_t l, size_t ali) {
        /* sd-boot uses UINTN for size_t, let's make sure SIZE_MAX is correct. */
        assert_cc(SIZE_MAX == ~(size_t)0);

        /* Check that the alignment is a power of 2 */
#if SIZE_MAX == UINT_MAX
        assert(__builtin_popcount(ali) == 1);
#elif SIZE_MAX == ULONG_MAX
        assert(__builtin_popcountl(ali) == 1);
#elif SIZE_MAX == ULLONG_MAX
        assert(__builtin_popcountll(ali) == 1);
#else
#error "Unexpected size_t"
#endif

        if (l > SIZE_MAX - (ali - 1))
                return SIZE_MAX; /* indicate overflow */

        return ((l + ali - 1) & ~(ali - 1));
}
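/* For example, ALIGN_TO(17, 8) is 24 and ALIGN_TO(16, 8) stays 16; if rounding up would exceed
 * SIZE_MAX, SIZE_MAX itself is returned so callers can detect the overflow. */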

/* Same as ALIGN_TO but callable in constant contexts. */
#define CONST_ALIGN_TO(l, ali) \
        __builtin_choose_expr( \
                __builtin_constant_p(l) && \
                __builtin_constant_p(ali) && \
                __builtin_popcountll(ali) == 1 && /* is power of 2? */ \
                (l <= SIZE_MAX - (ali - 1)), /* overflow? */ \
                ((l) + (ali) - 1) & ~((ali) - 1), \
                VOID_0)

#define UPDATE_FLAG(orig, flag, b) \
        ((b) ? ((orig) | (flag)) : ((orig) & ~(flag)))
#define SET_FLAG(v, flag, b) \
        (v) = UPDATE_FLAG(v, flag, b)
#define FLAGS_SET(v, flags) \
        ((~(v) & (flags)) == 0)
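/* For example, SET_FLAG(mask, FLAG_A, sd_true) turns FLAG_A on in mask, SET_FLAG(mask, FLAG_A, sd_false)
 * turns it off, and FLAGS_SET(mask, FLAG_A|FLAG_B) is true only if *all* of the given flags are set
 * (use a plain "mask & FLAG_A" to test whether any one of them is set). */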