xref: /freebsd/sys/contrib/ck/include/gcc/arm/ck_pr.h (revision 1fb62fb074788ca4713551be09d6569966a3abee)
1 /*
2  * Copyright 2009-2015 Samy Al Bahra.
3  * Copyright 2013-2015 Olivier Houchard.
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer.
11  * 2. Redistributions in binary form must reproduce the above copyright
12  *    notice, this list of conditions and the following disclaimer in the
13  *    documentation and/or other materials provided with the distribution.
14  *
15  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
25  * SUCH DAMAGE.
26  */
27 
28 #ifndef CK_PR_ARM_H
29 #define CK_PR_ARM_H
30 
31 #ifndef CK_PR_H
32 #error Do not include this file directly, use ck_pr.h
33 #endif
34 
35 #include <ck_cc.h>
36 #include <ck_md.h>
37 
38 /*
39  * The following represent supported atomic operations.
40  * These operations may be emulated.
41  */
42 #include "ck_f_pr.h"
43 
44 /*
45  * Minimum interface requirement met.
46  */
47 #define CK_F_PR
48 
49 CK_CC_INLINE static void
50 ck_pr_stall(void)
51 {
52 
53 	__asm__ __volatile__("" ::: "memory");
54 	return;
55 }
56 
/*
 * Hardware memory-barrier building blocks:
 *   CK_ISB    - instruction synchronization barrier
 *   CK_DSB    - data synchronization barrier
 *   CK_DMB    - full data memory barrier
 *   CK_DMB_ST - data memory barrier that orders stores only
 *
 * ARMv7 has dedicated isb/dsb/dmb instructions; ARMv6 falls back to the
 * CP15 coprocessor interface (see the comment in the #else branch).
 */
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
#define CK_ISB __asm __volatile("isb" : : "r" (0) : "memory")
#define CK_DMB __asm __volatile("dmb" : : "r" (0) : "memory")
#define CK_DSB __asm __volatile("dsb" : : "r" (0) : "memory")
/* FreeBSD's toolchain doesn't accept dmb st, so use the opcode instead */
#ifdef __FreeBSD__
/* 0xf57ff05e is the machine encoding of "dmb st". */
#define CK_DMB_ST __asm __volatile(".word 0xf57ff05e" : : "r" (0) : "memory")
#else
#define CK_DMB_ST __asm __volatile("dmb st" : : "r" (0) : "memory")
#endif /* __FreeBSD__ */
#else
/* armv6 doesn't have dsb/dmb/isb, and no way to wait only for stores */
#define CK_ISB \
    __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
#define CK_DSB \
    __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
#define CK_DMB  \
    __asm __volatile("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory")
/* ARMv6 cannot order stores alone, so the store barrier is a full DMB. */
#define CK_DMB_ST CK_DMB
#endif
77 
/*
 * CK_PR_FENCE(T, I) defines ck_pr_fence_strict_T() whose body is the
 * single barrier statement I.  The non-strict ck_pr_fence_T() wrappers
 * are generated elsewhere by ck_pr.h.
 */
#define CK_PR_FENCE(T, I)				\
	CK_CC_INLINE static void			\
	ck_pr_fence_strict_##T(void)			\
	{						\
		I;					\
	}

/*
 * Store-side orderings use the cheaper store-only barrier; the rest use
 * a full DMB.
 *
 * NOTE(review): fence_strict_atomic_load expands to the store-only
 * barrier (CK_DMB_ST) even though it involves load ordering — confirm
 * this is intentional against upstream ck.
 */
CK_PR_FENCE(atomic, CK_DMB_ST)
CK_PR_FENCE(atomic_store, CK_DMB_ST)
CK_PR_FENCE(atomic_load, CK_DMB_ST)
CK_PR_FENCE(store_atomic, CK_DMB_ST)
CK_PR_FENCE(load_atomic, CK_DMB)
CK_PR_FENCE(store, CK_DMB_ST)
CK_PR_FENCE(store_load, CK_DMB)
CK_PR_FENCE(load, CK_DMB)
CK_PR_FENCE(load_store, CK_DMB)
CK_PR_FENCE(memory, CK_DMB)
CK_PR_FENCE(acquire, CK_DMB)
CK_PR_FENCE(release, CK_DMB)
CK_PR_FENCE(acqrel, CK_DMB)
CK_PR_FENCE(lock, CK_DMB)
CK_PR_FENCE(unlock, CK_DMB)

#undef CK_PR_FENCE

/* The raw barrier macros are private to this header. */
#undef CK_ISB
#undef CK_DSB
#undef CK_DMB
#undef CK_DMB_ST
107 
/*
 * CK_PR_LOAD(S, M, T, C, I) defines ck_pr_md_load_S(): a load of a T
 * from *target using ARM load instruction I (ldr/ldrh/ldrb).  The
 * value is staged in a long ("=r" output) and cast to T on return; the
 * "memory" clobber keeps the compiler from reordering the load against
 * surrounding accesses.
 */
#define CK_PR_LOAD(S, M, T, C, I)				\
	CK_CC_INLINE static T					\
	ck_pr_md_load_##S(const M *target)			\
	{							\
		long r = 0;					\
		__asm__ __volatile__(I " %0, [%1];"		\
					: "=r" (r)		\
					: "r"  (target)		\
					: "memory");		\
		return ((T)r);					\
	}

CK_PR_LOAD(ptr, void, void *, uint32_t, "ldr")

/* Shorthand for the cases where memory and value types coincide. */
#define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, T, I)

CK_PR_LOAD_S(32, uint32_t, "ldr")
CK_PR_LOAD_S(16, uint16_t, "ldrh")
CK_PR_LOAD_S(8, uint8_t, "ldrb")
CK_PR_LOAD_S(uint, unsigned int, "ldr")
CK_PR_LOAD_S(int, int, "ldr")
CK_PR_LOAD_S(short, short, "ldrh")
CK_PR_LOAD_S(char, char, "ldrb")

#undef CK_PR_LOAD_S
#undef CK_PR_LOAD
134 
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)

/*
 * 64-bit loads (uint64_t and double), ARMv7 only: ldrexd reads both
 * 32-bit halves as a unit into a register pair.  The exclusive-monitor
 * state it establishes is simply discarded — no matching strexd.
 */
#define CK_PR_DOUBLE_LOAD(T, N) 		\
CK_CC_INLINE static T				\
ck_pr_md_load_##N(const T *target)		\
{						\
	register T ret;				\
						\
	__asm __volatile("ldrexd %0, [%1]" 	\
	    : "=&r" (ret)			\
	    : "r" (target)			\
	    : "memory", "cc");			\
	return (ret);				\
}

CK_PR_DOUBLE_LOAD(uint64_t, 64)
CK_PR_DOUBLE_LOAD(double, double)
#undef CK_PR_DOUBLE_LOAD
#endif
154 
/*
 * CK_PR_STORE(S, M, T, C, I) defines ck_pr_md_store_S(): a store of v
 * to *target using ARM store instruction I (str/strh/strb).  The
 * "memory" clobber orders the store with respect to the compiler.
 */
#define CK_PR_STORE(S, M, T, C, I)				\
	CK_CC_INLINE static void				\
	ck_pr_md_store_##S(M *target, T v)			\
	{							\
		__asm__ __volatile__(I " %1, [%0]"		\
					:			\
					: "r" (target),		\
					  "r" (v)		\
					: "memory");		\
		return;						\
	}

CK_PR_STORE(ptr, void, const void *, uint32_t, "str")

/* Shorthand for the cases where memory and value types coincide. */
#define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, T, I)

CK_PR_STORE_S(32, uint32_t, "str")
CK_PR_STORE_S(16, uint16_t, "strh")
CK_PR_STORE_S(8, uint8_t, "strb")
CK_PR_STORE_S(uint, unsigned int, "str")
CK_PR_STORE_S(int, int, "str")
CK_PR_STORE_S(short, short, "strh")
CK_PR_STORE_S(char, char, "strb")

#undef CK_PR_STORE_S
#undef CK_PR_STORE
181 
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)

/*
 * 64-bit stores (uint64_t and double), ARMv7 only.  A plain strd is
 * not guaranteed single-copy atomic, so the store is done as an
 * ldrexd/strexd loop that retries until strexd reports success
 * (flag == 0).  The "it ne" is required for the conditional branch
 * when assembling in Thumb-2 mode.
 */
#define CK_PR_DOUBLE_STORE(T, N)				\
CK_CC_INLINE static void					\
ck_pr_md_store_##N(const T *target, T value)			\
{								\
	T tmp;							\
	uint32_t flag;						\
	__asm __volatile("1: 		\n"			\
	    		 "ldrexd	%0, [%2]\n"		\
			 "strexd	%1, %3, [%2]\n"		\
			 "teq		%1, #0\n"		\
			 "it ne		\n"			\
			 "bne		1b\n"			\
				: "=&r" (tmp), "=&r" (flag)	\
				: "r" (target), "r" (value)	\
				: "memory", "cc");		\
}

CK_PR_DOUBLE_STORE(uint64_t, 64)
CK_PR_DOUBLE_STORE(double, double)

#undef CK_PR_DOUBLE_STORE
205 
/*
 * 64-bit compare-and-swap returning the witnessed value through *value.
 * %Q and %R are GCC operand modifiers selecting the low and high 32-bit
 * register of a 64-bit operand; both halves must match compare before
 * strexdeq attempts the store.  The loop retries only when strexd loses
 * the reservation (%1 == 1 after a successful comparison).
 *
 * Returns true iff the witnessed value equalled compare (i.e. the swap
 * was performed).
 */
#define CK_PR_DOUBLE_CAS_VALUE(T, N)				\
CK_CC_INLINE static bool					\
ck_pr_cas_##N##_value(T *target, T compare, T set, T *value)	\
{								\
        T previous;						\
        int tmp;						\
								\
	__asm__ __volatile__("1:"				\
			     "ldrexd %0, [%4];"			\
			     "cmp    %Q0, %Q2;"			\
			     "ittt eq;"				\
			     "cmpeq  %R0, %R2;"			\
			     "strexdeq %1, %3, [%4];"		\
			     "cmpeq  %1, #1;"			\
			     "beq 1b;"				\
				:"=&r" (previous), "=&r" (tmp)	\
				: "r" (compare), "r" (set) ,	\
				  "r"(target)			\
				: "memory", "cc");		\
        *value = previous;					\
	return (*value == compare);				\
}

CK_PR_DOUBLE_CAS_VALUE(uint64_t, 64)
CK_PR_DOUBLE_CAS_VALUE(double, double)

#undef CK_PR_DOUBLE_CAS_VALUE
233 
234 CK_CC_INLINE static bool
235 ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
236 {
237 	uint32_t *_compare = CK_CPP_CAST(uint32_t *, compare);
238 	uint32_t *_set = CK_CPP_CAST(uint32_t *, set);
239 	uint64_t __compare = ((uint64_t)_compare[0]) | ((uint64_t)_compare[1] << 32);
240 	uint64_t __set = ((uint64_t)_set[0]) | ((uint64_t)_set[1] << 32);
241 
242 	return (ck_pr_cas_64_value(CK_CPP_CAST(uint64_t *, target),
243 				   __compare,
244 				   __set,
245 				   CK_CPP_CAST(uint64_t *, value)));
246 }
247 
/*
 * 64-bit compare-and-swap without the witnessed-value output.  ret is
 * primed to 0 each iteration and set to 1 only after a successful
 * strexdeq; the "cmpeq %1, #1; beq 1b" pair retries when strexd lost
 * the reservation.  %Q/%R select the low/high word of the 64-bit
 * operands.  Returns true iff the swap was performed.
 */
#define CK_PR_DOUBLE_CAS(T, N)  		\
CK_CC_INLINE static bool			\
ck_pr_cas_##N(T *target, T compare, T set)	\
{						\
	int ret;				\
        T tmp;					\
						\
	__asm__ __volatile__("1:"		\
			     "mov %0, #0;"	\
			     "ldrexd %1, [%4];"	\
			     "cmp    %Q1, %Q2;"	\
			     "itttt eq;"	\
			     "cmpeq  %R1, %R2;"	\
			     "strexdeq %1, %3, [%4];" \
			     "moveq %0, #1;"	\
			     "cmpeq  %1, #1;"	\
			     "beq 1b;"		\
			     : "=&r" (ret), "=&r" (tmp) \
			     : "r" (compare), "r" (set) , \
			       "r"(target)	\
			     : "memory", "cc");	\
						\
	return (ret);				\
}

CK_PR_DOUBLE_CAS(uint64_t, 64)
CK_PR_DOUBLE_CAS(double, double)
275 CK_CC_INLINE static bool
276 ck_pr_cas_ptr_2(void *target, void *compare, void *set)
277 {
278 	uint32_t *_compare = CK_CPP_CAST(uint32_t *, compare);
279 	uint32_t *_set = CK_CPP_CAST(uint32_t *, set);
280 	uint64_t __compare = ((uint64_t)_compare[0]) | ((uint64_t)_compare[1] << 32);
281 	uint64_t __set = ((uint64_t)_set[0]) | ((uint64_t)_set[1] << 32);
282 	return (ck_pr_cas_64(CK_CPP_CAST(uint64_t *, target),
283 			     __compare,
284 			     __set));
285 }
286 
287 #endif
288 
/*
 * Pointer-width compare-and-swap reporting the witnessed value through
 * *value.  ldrex/strexeq loop: the store is attempted only when the
 * loaded word equals compare, and the loop retries only when strex
 * reports a lost reservation (%1 == 1).  Returns true iff the swap was
 * performed.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_value(void *target, void *compare, void *set, void *value)
{
	void *previous, *tmp;
	__asm__ __volatile__("1:"
			     "ldrex %0, [%2];"
			     "cmp   %0, %4;"
			     "itt eq;"
			     "strexeq %1, %3, [%2];"
			     "cmpeq   %1, #1;"
			     "beq   1b;"
			  	: "=&r" (previous),
				  "=&r" (tmp)
		  		: "r"   (target),
				  "r"   (set),
				  "r"   (compare)
				: "memory", "cc");
	/* value is an out-parameter of type void **, passed as void *. */
	*(void **)value = previous;
	return (previous == compare);
}
309 
/*
 * Pointer-width compare-and-swap.  Identical to ck_pr_cas_ptr_value()
 * except the witnessed value is discarded.  Returns true iff the swap
 * was performed.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr(void *target, void *compare, void *set)
{
	void *previous, *tmp;
	__asm__ __volatile__("1:"
			     "ldrex %0, [%2];"
			     "cmp   %0, %4;"
			     "itt eq;"
			     "strexeq %1, %3, [%2];"
			     "cmpeq   %1, #1;"
			     "beq   1b;"
			  	: "=&r" (previous),
				  "=&r" (tmp)
		  		: "r"   (target),
				  "r"   (set),
				  "r"   (compare)
				: "memory", "cc");
	return (previous == compare);
}
329 
/*
 * CK_PR_CAS(N, T, W) generates ck_pr_cas_N_value() and ck_pr_cas_N()
 * for type T, where W is the ldrex/strex width suffix ("", "h", "b").
 * Both are ldrex/strexeq loops that attempt the store only on a match
 * and retry only on a lost reservation.  Each returns true iff the
 * swap was performed; the _value variant also reports the witnessed
 * value through *value.
 */
#define CK_PR_CAS(N, T, W)						\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N##_value(T *target, T compare, T set, T *value)	\
	{								\
		T previous = 0, tmp = 0;				\
		__asm__ __volatile__("1:"				\
				     "ldrex" W " %0, [%2];"		\
				     "cmp   %0, %4;"			\
				     "itt eq;"				\
				     "strex" W "eq %1, %3, [%2];"	\
		    		     "cmpeq   %1, #1;"			\
				     "beq   1b;"			\
			/* 						\
			 * Using "+&" instead of "=&" to avoid bogus	\
			 * clang warnings.				\
			 */						\
					: "+&r" (previous),		\
		    			  "+&r" (tmp)			\
					: "r"   (target),		\
					  "r"   (set),			\
					  "r"   (compare)		\
					: "memory", "cc");		\
		*value = previous; 					\
		return (previous == compare);				\
	}								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N(T *target, T compare, T set)			\
	{								\
		T previous = 0, tmp = 0;				\
		__asm__ __volatile__("1:"				\
				     "ldrex" W " %0, [%2];"		\
				     "cmp   %0, %4;"			\
				     "itt eq;"				\
				     "strex" W "eq %1, %3, [%2];"	\
				     "cmpeq   %1, #1;"			\
				     "beq   1b;"			\
					: "+&r" (previous),		\
		    			  "+&r" (tmp)			\
					: "r"   (target),		\
					  "r"   (set),			\
					  "r"   (compare)		\
					: "memory", "cc");		\
		return (previous == compare);				\
	}

CK_PR_CAS(32, uint32_t, "")
CK_PR_CAS(uint, unsigned int, "")
CK_PR_CAS(int, int, "")
CK_PR_CAS(16, uint16_t, "h")
CK_PR_CAS(8, uint8_t, "b")
CK_PR_CAS(short, short, "h")
CK_PR_CAS(char, char, "b")


#undef CK_PR_CAS
385 
/*
 * CK_PR_FAS(N, M, T, W) generates ck_pr_fas_N(): atomic fetch-and-set
 * (unconditional exchange).  ldrex/strex loop retrying until strex
 * succeeds (%1 == 0); returns the value that was previously stored at
 * *target.  W is the width suffix ("", "h", "b").
 */
#define CK_PR_FAS(N, M, T, W)					\
	CK_CC_INLINE static T					\
	ck_pr_fas_##N(M *target, T v)				\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldrex" W " %0, [%2];"	\
				     "strex" W " %1, %3, [%2];"	\
		    		     "cmp %1, #0;"		\
				     "bne 1b;"			\
					: "+&r" (previous),	\
		    			  "+&r" (tmp) 		\
					: "r"   (target),	\
					  "r"   (v)		\
					: "memory", "cc");	\
		return (previous);				\
	}

CK_PR_FAS(32, uint32_t, uint32_t, "")
CK_PR_FAS(ptr, void, void *, "")
CK_PR_FAS(int, int, int, "")
CK_PR_FAS(uint, unsigned int, unsigned int, "")
CK_PR_FAS(16, uint16_t, uint16_t, "h")
CK_PR_FAS(8, uint8_t, uint8_t, "b")
CK_PR_FAS(short, short, short, "h")
CK_PR_FAS(char, char, char, "b")


#undef CK_PR_FAS
416 
/*
 * CK_PR_UNARY(O, N, M, T, I, W) generates ck_pr_O_N(): an atomic
 * read-modify-write applying the single-operand instruction template I
 * to the loaded value inside an ldrex/strex retry loop.  The result is
 * discarded by callers; only the memory update matters.
 */
#define CK_PR_UNARY(O, N, M, T, I, W)				\
	CK_CC_INLINE static void				\
	ck_pr_##O##_##N(M *target)				\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldrex" W " %0, [%2];"	\
				      I ";"			\
				     "strex" W " %1, %0, [%2];"	\
		    		     "cmp   %1, #0;"		\
				     "bne   1b;"		\
					: "+&r" (previous),	\
		    			  "+&r" (tmp)		\
					: "r"   (target)	\
					: "memory", "cc");	\
		return;						\
	}

/* inc = +1, dec = -1, not = bitwise complement, neg = two's complement */
CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "")
CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "")
CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "")
CK_PR_UNARY(neg, ptr, void, void *, "neg %0, %0", "")

/*
 * NOTE(review): the trailing backslash on the last CK_PR_UNARY line
 * below extends the macro definition across the following blank line;
 * harmless, but looks like a stray continuation.
 */
#define CK_PR_UNARY_S(S, T, W)					\
	CK_PR_UNARY(inc, S, T, T, "add %0, %0, #1", W)		\
	CK_PR_UNARY(dec, S, T, T, "sub %0, %0, #1", W)		\
	CK_PR_UNARY(not, S, T, T, "mvn %0, %0", W)		\
	CK_PR_UNARY(neg, S, T, T, "neg %0, %0", W)		\

CK_PR_UNARY_S(32, uint32_t, "")
CK_PR_UNARY_S(uint, unsigned int, "")
CK_PR_UNARY_S(int, int, "")
CK_PR_UNARY_S(16, uint16_t, "h")
CK_PR_UNARY_S(8, uint8_t, "b")
CK_PR_UNARY_S(short, short, "h")
CK_PR_UNARY_S(char, char, "b")

#undef CK_PR_UNARY_S
#undef CK_PR_UNARY
457 
/*
 * CK_PR_BINARY(O, N, M, T, I, W) generates ck_pr_O_N(): an atomic
 * read-modify-write combining the loaded value with delta via the
 * two-operand instruction I (and/add/orr/sub/eor) inside an
 * ldrex/strex retry loop.
 */
#define CK_PR_BINARY(O, N, M, T, I, W)				\
	CK_CC_INLINE static void				\
	ck_pr_##O##_##N(M *target, T delta)			\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldrex" W " %0, [%2];"	\
				      I " %0, %0, %3;"		\
				     "strex" W " %1, %0, [%2];"	\
		    		     "cmp %1, #0;"		\
				     "bne 1b;"			\
					: "+&r" (previous),	\
		    			  "+&r" (tmp)		\
					: "r"   (target),	\
					  "r"   (delta)		\
					: "memory", "cc");	\
		return;						\
	}

CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "")
CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "")
CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "")
CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "")
CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "")

#define CK_PR_BINARY_S(S, T, W)			\
	CK_PR_BINARY(and, S, T, T, "and", W)	\
	CK_PR_BINARY(add, S, T, T, "add", W)	\
	CK_PR_BINARY(or, S, T, T, "orr", W)	\
	CK_PR_BINARY(sub, S, T, T, "sub", W)	\
	CK_PR_BINARY(xor, S, T, T, "eor", W)

CK_PR_BINARY_S(32, uint32_t, "")
CK_PR_BINARY_S(uint, unsigned int, "")
CK_PR_BINARY_S(int, int, "")
CK_PR_BINARY_S(16, uint16_t, "h")
CK_PR_BINARY_S(8, uint8_t, "b")
CK_PR_BINARY_S(short, short, "h")
CK_PR_BINARY_S(char, char, "b")

#undef CK_PR_BINARY_S
#undef CK_PR_BINARY
501 
/*
 * Atomic fetch-and-add on a pointer-width word.  ldrex/strex loop: the
 * sum is built in a scratch register (r) so that the original value
 * (previous) survives to be returned.  Returns the value held at
 * *target before the addition.
 */
CK_CC_INLINE static void *
ck_pr_faa_ptr(void *target, uintptr_t delta)
{
	uintptr_t previous, r, tmp;

	__asm__ __volatile__("1:"
			     "ldrex %0, [%3];"
			     "add %1, %4, %0;"
			     "strex %2, %1, [%3];"
			     "cmp %2, #0;"
			     "bne  1b;"
				: "=&r" (previous),
				  "=&r" (r),
				  "=&r" (tmp)
				: "r"   (target),
				  "r"   (delta)
				: "memory", "cc");

	return (void *)(previous);
}
522 
/*
 * CK_PR_FAA(S, T, W) generates ck_pr_faa_S(): atomic fetch-and-add for
 * type T with width suffix W.  Same structure as ck_pr_faa_ptr(): the
 * sum goes through scratch register r so the original value can be
 * returned.
 */
#define CK_PR_FAA(S, T, W)						\
	CK_CC_INLINE static T						\
	ck_pr_faa_##S(T *target, T delta)				\
	{								\
		T previous = 0, r = 0, tmp = 0;				\
		__asm__ __volatile__("1:"				\
				     "ldrex" W " %0, [%3];"		\
				     "add %1, %4, %0;"			\
				     "strex" W " %2, %1, [%3];"		\
		    		     "cmp %2, #0;"			\
				     "bne  1b;"				\
					: "+&r" (previous),		\
					  "+&r" (r),			\
		    			  "+&r" (tmp)			\
					: "r"   (target),		\
					  "r"   (delta)			\
					: "memory", "cc");		\
		return (previous);					\
	}

CK_PR_FAA(32, uint32_t, "")
CK_PR_FAA(uint, unsigned int, "")
CK_PR_FAA(int, int, "")
CK_PR_FAA(16, uint16_t, "h")
CK_PR_FAA(8, uint8_t, "b")
CK_PR_FAA(short, short, "h")
CK_PR_FAA(char, char, "b")

#undef CK_PR_FAA
552 
553 #endif /* CK_PR_ARM_H */
554 
555