xref: /linux/tools/testing/selftests/bpf/bpf_atomic.h (revision fa593d0f969dcfa41d390822fdf1a0ab48cd882c)
// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */
#ifndef BPF_ATOMIC_H
#define BPF_ATOMIC_H

#include <vmlinux.h>
#include <bpf/bpf_helpers.h>
#include "bpf_experimental.h"

extern bool CONFIG_X86_64 __kconfig __weak;

/*
 * __unqual_typeof(x) - Declare an unqualified scalar type, leaving
 *			non-scalar types unchanged.
 *
 * Prefer C11 _Generic for better compile-times and simpler code. Note: 'char'
 * is not type-compatible with 'signed char', and we define a separate case.
 *
 * This is copied verbatim from the kernel's include/linux/compiler_types.h,
 * but with the default expression (for pointers) changed from (x) to
 * (typeof(x))0.
 *
 * This is because LLVM has a bug where, for an lvalue (x), it does not drop
 * the extra address_space qualifier, but it does for the rvalue (typeof(x))0.
 * Hence, for pointers, we need to create an rvalue expression to get the
 * desired type. See https://github.com/llvm/llvm-project/issues/53400.
 */
#define __scalar_type_to_expr_cases(type) \
	unsigned type : (unsigned type)0, signed type : (signed type)0

#define __unqual_typeof(x)                              \
	typeof(_Generic((x),                            \
		char: (char)0,                          \
		__scalar_type_to_expr_cases(char),      \
		__scalar_type_to_expr_cases(short),     \
		__scalar_type_to_expr_cases(int),       \
		__scalar_type_to_expr_cases(long),      \
		__scalar_type_to_expr_cases(long long), \
		default: (typeof(x))0))

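/*
 * Illustrative sketch, not part of the original header: __unqual_typeof()
 * yields the unqualified scalar type, so a local declared with it does not
 * inherit 'volatile' or 'const' from the object it is initialized from.
 * With a hypothetical "volatile int ctr":
 *
 *	volatile int ctr;
 *	__unqual_typeof(ctr) snap = ctr;	// 'snap' is a plain int
 *
 * This is what lets try_cmpxchg() below keep its temporaries unqualified
 * even when the caller passes pointers to volatile or address_space
 * qualified memory.
 */
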
/* No-op for BPF */
#define cpu_relax() ({})

#define READ_ONCE(x) (*(volatile typeof(x) *)&(x))

#define WRITE_ONCE(x, val) ((*(volatile typeof(x) *)&(x)) = (val))

#define cmpxchg(p, old, new) __sync_val_compare_and_swap((p), old, new)

#define try_cmpxchg(p, pold, new)                                 \
	({                                                        \
		__unqual_typeof(*(pold)) __o = *(pold);           \
		__unqual_typeof(*(p)) __r = cmpxchg(p, __o, new); \
		if (__r != __o)                                   \
			*(pold) = __r;                            \
		__r == __o;                                       \
	})
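
/*
 * Usage sketch (illustrative only, hypothetical 'counter' pointer): as in
 * the kernel proper, try_cmpxchg() writes the value it actually found back
 * into *pold on failure, so a lock-free update loop does not need to
 * re-read the location by hand:
 *
 *	int old = READ_ONCE(*counter);
 *	do {
 *	} while (!try_cmpxchg(counter, &old, old + 1));
 *
 * BPF programs must keep such loops bounded; the smp_cond_load_*_label()
 * helpers below do that with cond_break_label().
 */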

#define try_cmpxchg_relaxed(p, pold, new) try_cmpxchg(p, pold, new)

#define try_cmpxchg_acquire(p, pold, new) try_cmpxchg(p, pold, new)

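/*
 * BPF has no dedicated barrier instruction; emit a full barrier by doing an
 * atomic read-modify-write (a fetch-and-add of zero) on a dummy stack slot,
 * relying on value-returning atomics being fully ordered, as in the
 * kernel's memory model.
 */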
#define smp_mb()                                 \
	({                                       \
		unsigned long __val;             \
		__sync_fetch_and_add(&__val, 0); \
	})

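/*
 * x86-64 (TSO) never reorders loads with loads or stores with stores, so
 * smp_rmb()/smp_wmb() only need a compiler barrier there; other
 * architectures fall back to a full barrier.
 */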
#define smp_rmb()                   \
	({                          \
		if (!CONFIG_X86_64) \
			smp_mb();   \
		else                \
			barrier();  \
	})

#define smp_wmb()                   \
	({                          \
		if (!CONFIG_X86_64) \
			smp_mb();   \
		else                \
			barrier();  \
	})

/* A control dependency only orders LOAD->STORE; also provide LOAD->LOAD */
#define smp_acquire__after_ctrl_dep() ({ smp_rmb(); })

#define smp_load_acquire(p)                                  \
	({                                                   \
		__unqual_typeof(*(p)) __v = READ_ONCE(*(p)); \
		if (!CONFIG_X86_64)                          \
			smp_mb();                            \
		barrier();                                   \
		__v;                                         \
	})

#define smp_store_release(p, val)      \
	({                             \
		if (!CONFIG_X86_64)    \
			smp_mb();      \
		barrier();             \
		WRITE_ONCE(*(p), val); \
	})

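/*
 * Message-passing sketch (illustrative, hypothetical pointer 's' to a
 * struct with 'data' and 'ready' fields): the consumer's load-acquire of
 * 'ready' pairs with the producer's store-release, so once the flag is
 * observed set, the write to 'data' is visible as well:
 *
 *	producer:
 *		WRITE_ONCE(s->data, 42);
 *		smp_store_release(&s->ready, 1);
 *	consumer:
 *		if (smp_load_acquire(&s->ready))
 *			val = READ_ONCE(s->data);	// guaranteed to see 42
 */
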
#define smp_cond_load_relaxed_label(p, cond_expr, label)                \
	({                                                              \
		typeof(p) __ptr = (p);                                  \
		__unqual_typeof(*(p)) VAL;                              \
		for (;;) {                                              \
			VAL = (__unqual_typeof(*(p)))READ_ONCE(*__ptr); \
			if (cond_expr)                                  \
				break;                                  \
			cond_break_label(label);                        \
			cpu_relax();                                    \
		}                                                       \
		(typeof(*(p)))VAL;                                      \
	})

#define smp_cond_load_acquire_label(p, cond_expr, label)                  \
	({                                                                \
		__unqual_typeof(*p) __val =                               \
			smp_cond_load_relaxed_label(p, cond_expr, label); \
		smp_acquire__after_ctrl_dep();                            \
		(typeof(*(p)))__val;                                      \
	})

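/*
 * Usage sketch (illustrative, hypothetical 'lock' pointer and 'out'
 * label): inside cond_expr, VAL names the value just loaded from *p.
 * cond_break_label() keeps the loop verifiable and jumps to the given
 * label once its iteration budget is exhausted:
 *
 *	smp_cond_load_acquire_label(&lock->locked, VAL == 0, out);
 *	// lock observed free here, with acquire ordering
 * out:
 *	// could not wait any longer, take the slow/error path
 */
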
#define atomic_read(p) READ_ONCE((p)->counter)

#define atomic_cond_read_relaxed_label(p, cond_expr, label) \
	smp_cond_load_relaxed_label(&(p)->counter, cond_expr, label)

#define atomic_cond_read_acquire_label(p, cond_expr, label) \
	smp_cond_load_acquire_label(&(p)->counter, cond_expr, label)

#define atomic_try_cmpxchg_relaxed(p, pold, new) \
	try_cmpxchg_relaxed(&(p)->counter, pold, new)

#define atomic_try_cmpxchg_acquire(p, pold, new) \
	try_cmpxchg_acquire(&(p)->counter, pold, new)
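
/*
 * Composition sketch (illustrative, hypothetical "atomic_t lock" and 'slow'
 * label): the atomic_t wrappers simply operate on the ->counter field, so
 * they combine with the primitives above in the usual way:
 *
 *	int old = 0;
 *	if (atomic_try_cmpxchg_acquire(&lock, &old, 1))
 *		return 0;	// fast path: lock was free and is now ours
 *	// otherwise wait for it to be released, or give up via 'slow'
 *	atomic_cond_read_acquire_label(&lock, VAL == 0, slow);
 */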

#endif /* BPF_ATOMIC_H */