/*
 * Copyright 2010 Samy Al Bahra.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef CK_PR_GCC_H
#define CK_PR_GCC_H

#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif

#include <ck_cc.h>

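/*
 * Compiler barrier: the empty asm statement with a "memory" clobber prevents
 * the compiler from reordering, caching, or eliminating memory accesses
 * across this point.  It emits no instructions and provides no ordering at
 * the CPU level.
 */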
CK_CC_INLINE static void
ck_pr_barrier(void)
{

	__asm__ __volatile__("" ::: "memory");
	return;
}

#ifndef CK_F_PR
#define CK_F_PR

#include <ck_stdbool.h>
#include <ck_stdint.h>

/*
 * The following header enumerates the atomic operations supported by this
 * port.  Some of these operations may be emulated.
 */
#include "ck_f_pr.h"

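/*
 * Force a volatile-qualified access so the compiler performs the load or
 * store exactly once and does not cache or elide it.
 */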
#define CK_PR_ACCESS(x) (*(volatile __typeof__(x) *)&(x))

#define CK_PR_LOAD(S, M, T)					\
	CK_CC_INLINE static T					\
	ck_pr_md_load_##S(const M *target)			\
	{							\
		T r;						\
		ck_pr_barrier();				\
		r = CK_PR_ACCESS(*(const T *)target);		\
		ck_pr_barrier();				\
		return (r);					\
	}							\
	CK_CC_INLINE static void				\
	ck_pr_md_store_##S(M *target, T v)			\
	{							\
		ck_pr_barrier();				\
		CK_PR_ACCESS(*(T *)target) = v;			\
		ck_pr_barrier();				\
		return;						\
	}

CK_CC_INLINE static void *
ck_pr_md_load_ptr(const void *target)
{
	void *r;

	ck_pr_barrier();
	r = CK_CC_DECONST_PTR(*(volatile void *const*)(target));
	ck_pr_barrier();

	return r;
}

CK_CC_INLINE static void
ck_pr_md_store_ptr(void *target, const void *v)
{

	ck_pr_barrier();
	*(volatile void **)target = CK_CC_DECONST_PTR(v);
	ck_pr_barrier();
	return;
}

#define CK_PR_LOAD_S(S, T) CK_PR_LOAD(S, T, T)

CK_PR_LOAD_S(char, char)
CK_PR_LOAD_S(uint, unsigned int)
CK_PR_LOAD_S(int, int)
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_LOAD_S(double, double)
#endif
CK_PR_LOAD_S(64, uint64_t)
CK_PR_LOAD_S(32, uint32_t)
CK_PR_LOAD_S(16, uint16_t)
CK_PR_LOAD_S(8,  uint8_t)

#undef CK_PR_LOAD_S
#undef CK_PR_LOAD
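
/*
 * Illustrative usage (not part of the original header): a minimal,
 * hypothetical sketch of the generated ck_pr_md_load_uint() and
 * ck_pr_md_store_uint() routines.  In this port they provide compiler
 * ordering only; CPU-level ordering requires the fences defined below.
 *
 *	static unsigned int ready;
 *
 *	void
 *	publish(void)
 *	{
 *
 *		ck_pr_md_store_uint(&ready, 1);
 *	}
 *
 *	bool
 *	observe(void)
 *	{
 *
 *		return ck_pr_md_load_uint(&ready) != 0;
 *	}
 */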

/*
 * In this generic port, ck_pr_stall() is only a compiler barrier;
 * architecture-specific ports typically emit a pause or yield hint here.
 */
CK_CC_INLINE static void
ck_pr_stall(void)
{

	ck_pr_barrier();
}

/*
 * Load and store fences are equivalent to full fences in the GCC port.
 */
#define CK_PR_FENCE(T)					\
	CK_CC_INLINE static void			\
	ck_pr_fence_strict_##T(void)			\
	{						\
		__sync_synchronize();			\
	}

CK_PR_FENCE(atomic)
CK_PR_FENCE(atomic_atomic)
CK_PR_FENCE(atomic_load)
CK_PR_FENCE(atomic_store)
CK_PR_FENCE(store_atomic)
CK_PR_FENCE(load_atomic)
CK_PR_FENCE(load)
CK_PR_FENCE(load_load)
CK_PR_FENCE(load_store)
CK_PR_FENCE(store)
CK_PR_FENCE(store_store)
CK_PR_FENCE(store_load)
CK_PR_FENCE(memory)
CK_PR_FENCE(acquire)
CK_PR_FENCE(release)
CK_PR_FENCE(acqrel)
CK_PR_FENCE(lock)
CK_PR_FENCE(unlock)

#undef CK_PR_FENCE
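
/*
 * Illustrative usage (not part of the original header): a hypothetical
 * message-passing pattern built on the strict fences generated above.
 * In this port every fence expands to __sync_synchronize(), a full barrier.
 *
 *	static int payload;
 *	static unsigned int flag;
 *
 *	void
 *	producer(void)
 *	{
 *
 *		ck_pr_md_store_int(&payload, 42);
 *		ck_pr_fence_strict_store();
 *		ck_pr_md_store_uint(&flag, 1);
 *	}
 *
 *	void
 *	consumer(void)
 *	{
 *
 *		while (ck_pr_md_load_uint(&flag) == 0)
 *			ck_pr_stall();
 *		ck_pr_fence_strict_load();
 *		(void)ck_pr_md_load_int(&payload);
 *	}
 */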

/*
 * Atomic compare and swap.
 */
#define CK_PR_CAS(S, M, T)							\
	CK_CC_INLINE static bool						\
	ck_pr_cas_##S(M *target, T compare, T set)				\
	{									\
		bool z;								\
		z = __sync_bool_compare_and_swap((T *)target, compare, set);	\
		return z;							\
	}

CK_PR_CAS(ptr, void, void *)

#define CK_PR_CAS_S(S, T) CK_PR_CAS(S, T, T)

CK_PR_CAS_S(char, char)
CK_PR_CAS_S(int, int)
CK_PR_CAS_S(uint, unsigned int)
CK_PR_CAS_S(64, uint64_t)
CK_PR_CAS_S(32, uint32_t)
CK_PR_CAS_S(16, uint16_t)
CK_PR_CAS_S(8,  uint8_t)

#undef CK_PR_CAS_S
#undef CK_PR_CAS
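
/*
 * Illustrative usage (not part of the original header): a hypothetical
 * lock-free update loop built on the generated ck_pr_cas_uint(), which
 * returns true only if the swap was performed.
 *
 *	void
 *	saturating_increment(unsigned int *counter, unsigned int limit)
 *	{
 *		unsigned int snapshot;
 *
 *		do {
 *			snapshot = ck_pr_md_load_uint(counter);
 *			if (snapshot == limit)
 *				return;
 *		} while (ck_pr_cas_uint(counter, snapshot, snapshot + 1) == false);
 *	}
 */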

/*
 * Compare and swap, set *v to old value of target.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_value(void *target, void *compare, void *set, void *v)
{
	set = __sync_val_compare_and_swap((void **)target, compare, set);
	*(void **)v = set;
	return (set == compare);
}

#define CK_PR_CAS_O(S, T)						\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##S##_value(T *target, T compare, T set, T *v)	\
	{								\
		set = __sync_val_compare_and_swap(target, compare, set);\
		*v = set;						\
		return (set == compare);				\
	}

CK_PR_CAS_O(char, char)
CK_PR_CAS_O(int, int)
CK_PR_CAS_O(uint, unsigned int)
CK_PR_CAS_O(64, uint64_t)
CK_PR_CAS_O(32, uint32_t)
CK_PR_CAS_O(16, uint16_t)
CK_PR_CAS_O(8,  uint8_t)

#undef CK_PR_CAS_O
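
/*
 * Illustrative usage (not part of the original header): a hypothetical
 * retry loop using ck_pr_cas_uint_value().  Because the observed value is
 * returned through the final argument, the loop does not need to reload
 * the target after a failed swap.
 *
 *	void
 *	atomic_or_mask(unsigned int *word, unsigned int mask)
 *	{
 *		unsigned int expected = ck_pr_md_load_uint(word);
 *
 *		while (ck_pr_cas_uint_value(word, expected,
 *		    expected | mask, &expected) == false)
 *			ck_pr_stall();
 *	}
 */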

/*
 * Atomic fetch-and-add operations.
 */
#define CK_PR_FAA(S, M, T)					\
	CK_CC_INLINE static T					\
	ck_pr_faa_##S(M *target, T d)				\
	{							\
		d = __sync_fetch_and_add((T *)target, d);	\
		return (d);					\
	}

CK_PR_FAA(ptr, void, void *)

#define CK_PR_FAA_S(S, T) CK_PR_FAA(S, T, T)

CK_PR_FAA_S(char, char)
CK_PR_FAA_S(uint, unsigned int)
CK_PR_FAA_S(int, int)
CK_PR_FAA_S(64, uint64_t)
CK_PR_FAA_S(32, uint32_t)
CK_PR_FAA_S(16, uint16_t)
CK_PR_FAA_S(8,  uint8_t)

#undef CK_PR_FAA_S
#undef CK_PR_FAA
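
/*
 * Illustrative usage (not part of the original header): a hypothetical
 * ticket dispenser built on the generated ck_pr_faa_uint(), which returns
 * the value the target held before the addition.
 *
 *	static unsigned int next_ticket;
 *
 *	unsigned int
 *	take_ticket(void)
 *	{
 *
 *		return ck_pr_faa_uint(&next_ticket, 1);
 *	}
 */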

/*
 * Atomic store-only binary operations; the result of the operation is not
 * returned to the caller.
 */
#define CK_PR_BINARY(K, S, M, T)				\
	CK_CC_INLINE static void				\
	ck_pr_##K##_##S(M *target, T d)				\
	{							\
		d = __sync_fetch_and_##K((T *)target, d);	\
		return;						\
	}

#define CK_PR_BINARY_S(K, S, T) CK_PR_BINARY(K, S, T, T)

#define CK_PR_GENERATE(K)			\
	CK_PR_BINARY(K, ptr, void, void *)	\
	CK_PR_BINARY_S(K, char, char)		\
	CK_PR_BINARY_S(K, int, int)		\
	CK_PR_BINARY_S(K, uint, unsigned int)	\
	CK_PR_BINARY_S(K, 64, uint64_t)		\
	CK_PR_BINARY_S(K, 32, uint32_t)		\
	CK_PR_BINARY_S(K, 16, uint16_t)		\
	CK_PR_BINARY_S(K, 8, uint8_t)

CK_PR_GENERATE(add)
CK_PR_GENERATE(sub)
CK_PR_GENERATE(and)
CK_PR_GENERATE(or)
CK_PR_GENERATE(xor)

#undef CK_PR_GENERATE
#undef CK_PR_BINARY_S
#undef CK_PR_BINARY
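
/*
 * Illustrative usage (not part of the original header): hypothetical helpers
 * that set and clear flag bits with the generated ck_pr_or_uint() and
 * ck_pr_and_uint() store-only operations.
 *
 *	#define FLAG_DIRTY 0x1U
 *
 *	void
 *	mark_dirty(unsigned int *flags)
 *	{
 *
 *		ck_pr_or_uint(flags, FLAG_DIRTY);
 *	}
 *
 *	void
 *	clear_dirty(unsigned int *flags)
 *	{
 *
 *		ck_pr_and_uint(flags, ~FLAG_DIRTY);
 *	}
 */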

#define CK_PR_UNARY(S, M, T)			\
	CK_CC_INLINE static void		\
	ck_pr_inc_##S(M *target)		\
	{					\
		ck_pr_add_##S(target, (T)1);	\
		return;				\
	}					\
	CK_CC_INLINE static void		\
	ck_pr_dec_##S(M *target)		\
	{					\
		ck_pr_sub_##S(target, (T)1);	\
		return;				\
	}

#define CK_PR_UNARY_S(S, M) CK_PR_UNARY(S, M, M)

CK_PR_UNARY(ptr, void, void *)
CK_PR_UNARY_S(char, char)
CK_PR_UNARY_S(int, int)
CK_PR_UNARY_S(uint, unsigned int)
CK_PR_UNARY_S(64, uint64_t)
CK_PR_UNARY_S(32, uint32_t)
CK_PR_UNARY_S(16, uint16_t)
CK_PR_UNARY_S(8, uint8_t)

#undef CK_PR_UNARY_S
#undef CK_PR_UNARY
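
/*
 * Illustrative usage (not part of the original header): a hypothetical
 * statistics counter maintained with the generated increment/decrement
 * wrappers, which are shorthand for ck_pr_add_uint(target, 1) and
 * ck_pr_sub_uint(target, 1).
 *
 *	static unsigned int active_requests;
 *
 *	void
 *	request_begin(void)
 *	{
 *
 *		ck_pr_inc_uint(&active_requests);
 *	}
 *
 *	void
 *	request_end(void)
 *	{
 *
 *		ck_pr_dec_uint(&active_requests);
 *	}
 */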
#endif /* !CK_F_PR */
#endif /* CK_PR_GCC_H */