xref: /freebsd/sys/arm64/include/atomic.h (revision 952364486a4b9d135e4b28f7f88a8703a74eae6f)
/*-
 * Copyright (c) 2013 Andrew Turner <andrew@freebsd.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#define	isb()  __asm __volatile("isb" : : : "memory")
#define	dsb()  __asm __volatile("dsb sy" : : : "memory")
#define	dmb()  __asm __volatile("dmb sy" : : : "memory")

#define	mb()   dmb()
#define	wmb()  dmb()
#define	rmb()  dmb()
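
/*
 * isb, dsb and dmb map directly to the corresponding ARMv8 barrier
 * instructions; dsb/dmb use the full-system "sy" domain.  mb(), wmb()
 * and rmb() are all full dmb barriers for now; the weaker "dmb ld" /
 * "dmb st" variants could later be used for rmb()/wmb().
 */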

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   add	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}
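
/*
 * The read-modify-write atomics below all use the same load/store
 * exclusive pattern: ldxr loads the old value and marks the address
 * for exclusive access, stxr writes the new value only if the
 * exclusive monitor is still held (setting its status register to 0
 * on success), and cbnz retries the whole sequence on failure.
 */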

static __inline void
atomic_clear_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   bic	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}

static __inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: mov	%w1, #1        \n"
	    "   ldxr	%w0, [%2]      \n"
	    "   cmp	%w0, %w3       \n"
	    "   b.ne	2f             \n"
	    "   stxr	%w1, %w4, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    "2:"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (cmpval), "+r" (newval)
	    : : "cc"
	);

	return (!res);
}
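
/*
 * atomic_cmpset_32 returns non-zero only when *p matched cmpval and
 * was replaced with newval.  A minimal sketch (not part of this file)
 * of a spin acquire built on the acquire/release variants defined
 * below, assuming a hypothetical lock word initialised to 0:
 *
 *	static volatile uint32_t lock_word;
 *
 *	while (atomic_cmpset_acq_32(&lock_word, 0, 1) == 0)
 *		;		(spin until we swap 0 -> 1)
 *	... critical section ...
 *	atomic_store_rel_32(&lock_word, 0);
 */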

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp, ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%w4, [%2]      \n"
	    "   add	%w0, %w4, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val), "=&r"(ret) : : "cc"
	);

	return (ret);
}
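
/*
 * atomic_fetchadd_32 returns the value *p held before the addition.
 * A sketch (hypothetical counter, not part of this file) that hands
 * out unique, monotonically increasing ticket numbers:
 *
 *	static volatile uint32_t next_ticket;
 *	uint32_t my_ticket = atomic_fetchadd_32(&next_ticket, 1);
 */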

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t tmp, ret;
	int res;

	__asm __volatile(
	    "   mov	%w0, #0        \n"
	    "1: ldxr	%w3, [%2]      \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "=&r"(ret) : : "cc"
	);

	return (ret);
}
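
/*
 * atomic_readandclear_32 atomically replaces *p with zero and returns
 * the previous contents.  A sketch for draining a hypothetical pending
 * event mask (event_mask is not part of this file):
 *
 *	uint32_t pending = atomic_readandclear_32(&event_mask);
 *	(handle each bit set in pending)
 */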

static __inline void
atomic_set_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   orr	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   sub	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}

#define	atomic_add_int		atomic_add_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_set_int		atomic_set_32
#define	atomic_subtract_int	atomic_subtract_32

static __inline void
atomic_add_acq_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%w0, [%2]      \n"
	    "   add	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_clear_acq_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%w0, [%2]      \n"
	    "   bic	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: mov	%w1, #1        \n"
	    "   ldaxr	%w0, [%2]      \n"
	    "   cmp	%w0, %w3       \n"
	    "   b.ne	2f             \n"
	    "   stxr	%w1, %w4, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    "2:"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (cmpval), "+r" (newval)
	    : : "cc", "memory"
	);

	return (!res);
}

static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t ret;

	__asm __volatile(
	    "ldar	%w0, [%1] \n"
	    : "=&r" (ret) : "r" (p) : "memory");

	return (ret);
}
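
/*
 * ldar is a load-acquire: no later load or store may be reordered
 * before it.  Consumer half of a hypothetical flag/payload handoff
 * (the producer half is sketched at atomic_store_rel_32 below):
 *
 *	while (atomic_load_acq_32(&data_ready) == 0)
 *		;		(spin until the producer sets the flag)
 *	(reads of the payload are now guaranteed to see it)
 */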

static __inline void
atomic_set_acq_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%w0, [%2]      \n"
	    "   orr	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_subtract_acq_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%w0, [%2]      \n"
	    "   sub	%w0, %w0, %w3  \n"
	    "   stxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32

/* The atomic functions currently are both acq and rel; we should fix this. */

static __inline void
atomic_add_rel_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   add	%w0, %w0, %w3  \n"
	    "   stlxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_clear_rel_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   bic	%w0, %w0, %w3  \n"
	    "   stlxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: mov	%w1, #1        \n"
	    "   ldxr	%w0, [%2]      \n"
	    "   cmp	%w0, %w3       \n"
	    "   b.ne	2f             \n"
	    "   stlxr	%w1, %w4, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    "2:"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (cmpval), "+r" (newval)
	    : : "cc", "memory"
	);

	return (!res);
}

static __inline void
atomic_set_rel_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   orr	%w0, %w0, %w3  \n"
	    "   stlxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile(
	    "stlr	%w0, [%1] \n"
	    : : "r" (val), "r" (p) : "memory");
}
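
/*
 * stlr is a store-release: no earlier load or store may be reordered
 * after it.  Producer half of the handoff sketched at
 * atomic_load_acq_32 above (data_ready and the payload are
 * hypothetical):
 *
 *	(plain stores fill in the payload)
 *	atomic_store_rel_32(&data_ready, 1);
 */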

static __inline void
atomic_subtract_rel_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   sub	%w0, %w0, %w3  \n"
	    "   stlxr	%w1, %w0, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_store_rel_int	atomic_store_rel_32

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   add	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r" (tmp), "=&r" (res), "+r" (p), "+r" (val) : : "cc"
	);
}

static __inline void
atomic_clear_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   bic	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: mov	%w1, #1       \n"
	    "   ldxr	%0, [%2]      \n"
	    "   cmp	%0, %3        \n"
	    "   b.ne	2f            \n"
	    "   stxr	%w1, %4, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    "2:"
	    : "=&r" (tmp), "=&r"(res), "+r" (p), "+r" (cmpval), "+r" (newval)
	    : : "cc", "memory"
	);

	return (!res);
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp, ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%4, [%2]      \n"
	    "   add	%0, %4, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val), "=&r"(ret) : : "cc"
	);

	return (ret);
}

static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *p)
{
	uint64_t tmp, ret;
	int res;

	__asm __volatile(
	    "   mov	%0, #0        \n"
	    "1: ldxr	%3, [%2]      \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "=&r"(ret) : : "cc"
	);

	return (ret);
}

static __inline void
atomic_set_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   orr	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   sub	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc"
	);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t old;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   stxr	%w1, %3, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(old), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);

	return (old);
}
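
/*
 * atomic_swap_64 unconditionally exchanges *p with val and returns the
 * previous contents.  A sketch of a hypothetical single-slot mailbox
 * that publishes a new message pointer and reclaims the displaced one
 * (mbox, struct msg and free_msg are not part of this file):
 *
 *	static volatile uint64_t mbox;
 *	struct msg *old;
 *
 *	old = (struct msg *)atomic_swap_64(&mbox, (uint64_t)new_msg);
 *	if (old != NULL)
 *		free_msg(old);
 */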

#define	atomic_add_long			atomic_add_64
#define	atomic_clear_long		atomic_clear_64
#define	atomic_cmpset_long		atomic_cmpset_64
#define	atomic_fetchadd_long		atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_set_long			atomic_set_64
#define	atomic_subtract_long		atomic_subtract_64

#define	atomic_add_ptr			atomic_add_64
#define	atomic_clear_ptr		atomic_clear_64
#define	atomic_cmpset_ptr		atomic_cmpset_64
#define	atomic_fetchadd_ptr		atomic_fetchadd_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
#define	atomic_set_ptr			atomic_set_64
#define	atomic_subtract_ptr		atomic_subtract_64

static __inline void
atomic_add_acq_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%0, [%2]      \n"
	    "   add	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_clear_acq_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%0, [%2]      \n"
	    "   bic	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline int
atomic_cmpset_acq_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: mov	%w1, #1       \n"
	    "   ldaxr	%0, [%2]      \n"
	    "   cmp	%0, %3        \n"
	    "   b.ne	2f            \n"
	    "   stxr	%w1, %4, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    "2:"
	    : "=&r" (tmp), "=&r" (res), "+r" (p), "+r" (cmpval), "+r" (newval)
	    : : "cc", "memory"
	);

	return (!res);
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{
	uint64_t ret;

	__asm __volatile(
	    "ldar	%0, [%1] \n"
	    : "=&r" (ret) : "r" (p) : "memory");

	return (ret);
}

static __inline void
atomic_set_acq_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%0, [%2]      \n"
	    "   orr	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_subtract_acq_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldaxr	%0, [%2]      \n"
	    "   sub	%0, %0, %3    \n"
	    "   stxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

#define	atomic_add_acq_long		atomic_add_acq_64
#define	atomic_clear_acq_long		atomic_clear_acq_64
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_64
#define	atomic_load_acq_long		atomic_load_acq_64
#define	atomic_set_acq_long		atomic_set_acq_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64

#define	atomic_add_acq_ptr		atomic_add_acq_64
#define	atomic_clear_acq_ptr		atomic_clear_acq_64
#define	atomic_cmpset_acq_ptr		atomic_cmpset_acq_64
#define	atomic_load_acq_ptr		atomic_load_acq_64
#define	atomic_set_acq_ptr		atomic_set_acq_64
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_64

/*
 * TODO: The atomic functions currently are both acq and rel; we should
 * fix this.
 */
static __inline void
atomic_add_rel_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   add	%0, %0, %3    \n"
	    "   stlxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_clear_rel_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   bic	%0, %0, %3    \n"
	    "   stlxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline int
atomic_cmpset_rel_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: mov	%w1, #1       \n"
	    "   ldxr	%0, [%2]      \n"
	    "   cmp	%0, %3        \n"
	    "   b.ne	2f            \n"
	    "   stlxr	%w1, %4, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    "2:"
	    : "=&r" (tmp), "=&r" (res), "+r" (p), "+r" (cmpval), "+r" (newval)
	    : : "cc", "memory"
	);

	return (!res);
}

static __inline void
atomic_set_rel_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   orr	%0, %0, %3    \n"
	    "   stlxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile(
	    "stlr	%0, [%1] \n"
	    : : "r" (val), "r" (p) : "memory");
}

static __inline void
atomic_subtract_rel_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   sub	%0, %0, %3    \n"
	    "   stlxr	%w1, %0, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "+r" (p), "+r" (val) : : "cc", "memory"
	);
}

#define	atomic_add_rel_long		atomic_add_rel_64
#define	atomic_clear_rel_long		atomic_clear_rel_64
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_64
#define	atomic_set_rel_long		atomic_set_rel_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_store_rel_long		atomic_store_rel_64

#define	atomic_add_rel_ptr		atomic_add_rel_64
#define	atomic_clear_rel_ptr		atomic_clear_rel_64
#define	atomic_cmpset_rel_ptr		atomic_cmpset_rel_64
#define	atomic_set_rel_ptr		atomic_set_rel_64
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_64
#define	atomic_store_rel_ptr		atomic_store_rel_64

#endif /* _MACHINE_ATOMIC_H_ */