xref: /freebsd/sys/riscv/include/atomic.h (revision 123af6ec70016f5556da5972d4d63c7d175c06d3)
/*-
 * Copyright (c) 2015 Ruslan Bukin <br@bsdpad.com>
 * All rights reserved.
 *
 * Portions of this software were developed by SRI International and the
 * University of Cambridge Computer Laboratory under DARPA/AFRL contract
 * FA8750-10-C-0237 ("CTSRD"), as part of the DARPA CRASH research programme.
 *
 * Portions of this software were developed by the University of Cambridge
 * Computer Laboratory as part of the CTSRD Project, with support from the
 * UK Higher Education Innovation Fund (HEIF).
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/atomic_common.h>

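/*
 * A bare "fence" is assembler shorthand for the full "fence iorw, iorw"
 * barrier, so fence(), mb(), rmb() and wmb() below are all full barriers;
 * no weaker read-only or write-only fences are used in this file.
 */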
#define	fence()	__asm __volatile("fence" ::: "memory")
#define	mb()	fence()
#define	rmb()	fence()
#define	wmb()	fence()

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	fence();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	fence();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
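
/*
 * For illustration, ATOMIC_ACQ_REL(add, 32) expands to atomic_add_acq_32()
 * (the plain op followed by a full fence) and atomic_add_rel_32() (a full
 * fence followed by the plain op).  Bracketing with "fence" is a
 * conservative but correct mapping; the A extension's .aq/.rl bits could
 * express acquire/release more cheaply.
 */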

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoadd.w zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoadd.w zero, %1, %0"
			: "+A" (*p)
			: "r" (-val)
			: "memory");
}

static __inline void
atomic_set_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoor.w zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoand.w zero, %1, %0"
			: "+A" (*p)
			: "r" (~val)
			: "memory");
}

static __inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n" /* Preset to fail */
			"lr.w %0, %2\n"
			"bne  %0, %z3, 1f\n"
			"sc.w %1, %z4, %2\n"
			"bnez %1, 0b\n"
		"1:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p)
			: "rJ" (cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}
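
/*
 * An illustrative (hypothetical) caller: a saturating increment built on
 * the cmpset primitive above, looping until its compare-and-set wins the
 * race.  "counter" is assumed to be a volatile uint32_t *.
 *
 *	uint32_t old;
 *
 *	do {
 *		old = *counter;
 *		if (old == UINT32_MAX)
 *			break;		// saturate instead of wrapping
 *	} while (atomic_cmpset_32(counter, old, old + 1) == 0);
 */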

static __inline int
atomic_fcmpset_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n"		/* Preset to fail */
			"lr.w %0, %2\n"		/* Load old value */
			"bne  %0, %z4, 1f\n"	/* Compare */
			"sc.w %1, %z5, %2\n"	/* Try to store new value */
			"j 2f\n"
		"1:"
			"sw   %0, %3\n"		/* Save old value */
		"2:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p), "+A" (*cmpval)
			: "rJ" (*cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}
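
/*
 * Unlike cmpset, fcmpset writes the observed old value back through
 * cmpval when the compare fails, and it does not retry a failed sc.w,
 * so it may fail spuriously even when *p matched; callers are expected
 * to loop.  A minimal sketch, assuming a hypothetical FLAG bit to be
 * set at *p:
 *
 *	uint32_t old;
 *
 *	old = *p;
 *	while (atomic_fcmpset_32(p, &old, old | FLAG) == 0)
 *		continue;	// old now holds the latest value of *p
 */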

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t ret;

	__asm __volatile("amoadd.w %0, %2, %1"
			: "=&r" (ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}
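
/*
 * fetchadd returns the value *p held before the add.  A hypothetical
 * use is handing out ticket numbers:
 *
 *	my_ticket = atomic_fetchadd_32(&next_ticket, 1);
 */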

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t ret;
	uint32_t val;

	val = 0;

	__asm __volatile("amoswap.w %0, %2, %1"
			: "=&r"(ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}
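
/*
 * readandclear is simply an atomic swap with zero: the old contents are
 * returned and *p is left cleared in a single AMO.
 */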

#define	atomic_add_int		atomic_add_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_fcmpset_int	atomic_fcmpset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_set_int		atomic_set_32
#define	atomic_subtract_int	atomic_subtract_32

ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)

static __inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	int res;

	res = atomic_cmpset_32(p, cmpval, newval);

	fence();

	return (res);
}

static __inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{

	fence();

	return (atomic_cmpset_32(p, cmpval, newval));
}

static __inline int
atomic_fcmpset_acq_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	int res;

	res = atomic_fcmpset_32(p, cmpval, newval);

	fence();

	return (res);
}

static __inline int
atomic_fcmpset_rel_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{

	fence();

	return (atomic_fcmpset_32(p, cmpval, newval));
}

static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t ret;

	ret = *p;

	fence();

	return (ret);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t val)
{

	fence();

	*p = val;
}
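
/*
 * A minimal spinlock sketch (hypothetical; the in-tree locks live
 * elsewhere) showing how the acquire/release pairs compose:
 *
 *	volatile uint32_t lock = 0;
 *
 *	while (atomic_cmpset_acq_32(&lock, 0, 1) == 0)
 *		;			// spin until we take the lock
 *	...critical section...
 *	atomic_store_rel_32(&lock, 0);	// release
 */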

#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_fcmpset_acq_int	atomic_fcmpset_acq_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32

#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_fcmpset_rel_int	atomic_fcmpset_rel_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_store_rel_int	atomic_store_rel_32

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoadd.d zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoadd.d zero, %1, %0"
			: "+A" (*p)
			: "r" (-val)
			: "memory");
}

static __inline void
atomic_set_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoor.d zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

static __inline void
atomic_clear_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoand.d zero, %1, %0"
			: "+A" (*p)
			: "r" (~val)
			: "memory");
}

static __inline int
atomic_cmpset_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n" /* Preset to fail */
			"lr.d %0, %2\n"
			"bne  %0, %z3, 1f\n"
			"sc.d %1, %z4, %2\n"
			"bnez %1, 0b\n"
		"1:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p)
			: "rJ" (cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n"		/* Preset to fail */
			"lr.d %0, %2\n"		/* Load old value */
			"bne  %0, %z4, 1f\n"	/* Compare */
			"sc.d %1, %z5, %2\n"	/* Try to store new value */
			"j 2f\n"
		"1:"
			"sd   %0, %3\n"		/* Save old value */
		"2:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p), "+A" (*cmpval)
			: "rJ" (*cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t ret;

	__asm __volatile("amoadd.d %0, %2, %1"
			: "=&r" (ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}

static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *p)
{
	uint64_t ret;
	uint64_t val;

	val = 0;

	__asm __volatile("amoswap.d %0, %2, %1"
			: "=&r"(ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}

static __inline uint32_t
atomic_swap_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t old;

	__asm __volatile("amoswap.w %0, %2, %1"
			: "=&r"(old), "+A" (*p)
			: "r" (val)
			: "memory");

	return (old);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t old;

	__asm __volatile("amoswap.d %0, %2, %1"
			: "=&r"(old), "+A" (*p)
			: "r" (val)
			: "memory");

	return (old);
}
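
/*
 * On riscv64 both long and pointers are 64 bits wide, so the _long and
 * _ptr operations below simply alias the 64-bit primitives.
 */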

#define	atomic_swap_int			atomic_swap_32

#define	atomic_add_long			atomic_add_64
#define	atomic_clear_long		atomic_clear_64
#define	atomic_cmpset_long		atomic_cmpset_64
#define	atomic_fcmpset_long		atomic_fcmpset_64
#define	atomic_fetchadd_long		atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_set_long			atomic_set_64
#define	atomic_subtract_long		atomic_subtract_64
#define	atomic_swap_long		atomic_swap_64

#define	atomic_add_ptr			atomic_add_64
#define	atomic_clear_ptr		atomic_clear_64
#define	atomic_cmpset_ptr		atomic_cmpset_64
#define	atomic_fcmpset_ptr		atomic_fcmpset_64
#define	atomic_fetchadd_ptr		atomic_fetchadd_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
#define	atomic_set_ptr			atomic_set_64
#define	atomic_subtract_ptr		atomic_subtract_64
#define	atomic_swap_ptr			atomic_swap_64

ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)

static __inline int
atomic_cmpset_acq_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	int res;

	res = atomic_cmpset_64(p, cmpval, newval);

	fence();

	return (res);
}

static __inline int
atomic_cmpset_rel_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{

	fence();

	return (atomic_cmpset_64(p, cmpval, newval));
}

static __inline int
atomic_fcmpset_acq_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	int res;

	res = atomic_fcmpset_64(p, cmpval, newval);

	fence();

	return (res);
}

static __inline int
atomic_fcmpset_rel_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{

	fence();

	return (atomic_fcmpset_64(p, cmpval, newval));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{
	uint64_t ret;

	ret = *p;

	fence();

	return (ret);
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
{

	fence();

	*p = val;
}

#define	atomic_add_acq_long		atomic_add_acq_64
#define	atomic_clear_acq_long		atomic_clear_acq_64
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_64
#define	atomic_fcmpset_acq_long	atomic_fcmpset_acq_64
#define	atomic_load_acq_long		atomic_load_acq_64
#define	atomic_set_acq_long		atomic_set_acq_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64

#define	atomic_add_acq_ptr		atomic_add_acq_64
#define	atomic_clear_acq_ptr		atomic_clear_acq_64
#define	atomic_cmpset_acq_ptr		atomic_cmpset_acq_64
#define	atomic_fcmpset_acq_ptr		atomic_fcmpset_acq_64
#define	atomic_load_acq_ptr		atomic_load_acq_64
#define	atomic_set_acq_ptr		atomic_set_acq_64
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_64

#undef ATOMIC_ACQ_REL

static __inline void
atomic_thread_fence_acq(void)
{

	fence();
}

static __inline void
atomic_thread_fence_rel(void)
{

	fence();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	fence();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	fence();
}
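
/*
 * All four C11-style thread fences collapse to the same full "fence"
 * instruction: correct for every ordering, if stronger than the acquire
 * and release variants strictly require.
 */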
#define	atomic_add_rel_long		atomic_add_rel_64
#define	atomic_clear_rel_long		atomic_clear_rel_64
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_64
#define	atomic_fcmpset_rel_long	atomic_fcmpset_rel_64
#define	atomic_set_rel_long		atomic_set_rel_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_store_rel_long		atomic_store_rel_64

#define	atomic_add_rel_ptr		atomic_add_rel_64
#define	atomic_clear_rel_ptr		atomic_clear_rel_64
#define	atomic_cmpset_rel_ptr		atomic_cmpset_rel_64
#define	atomic_fcmpset_rel_ptr		atomic_fcmpset_rel_64
#define	atomic_set_rel_ptr		atomic_set_rel_64
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_64
#define	atomic_store_rel_ptr		atomic_store_rel_64

#endif /* _MACHINE_ATOMIC_H_ */