/* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */

/*-
 * Copyright (C) 2003-2004 Olivier Houchard
 * Copyright (C) 1994-1997 Mark Brinicombe
 * Copyright (C) 1994 Brini
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of Brini may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/types.h>

#ifndef _KERNEL
#include <machine/sysarch.h>
#else
#include <machine/cpuconf.h>
#endif

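/*
 * The generic barrier macros are no-ops here; the ARMv6+ acquire/release
 * primitives below issue their barriers explicitly via __do_dmb() instead.
 */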
#define mb()
#define wmb()
#define rmb()

#ifndef I32_bit
#define I32_bit (1 << 7)        /* IRQ disable */
#endif
#ifndef F32_bit
#define F32_bit (1 << 6)        /* FIQ disable */
#endif

/*
 * It would be nice to use _HAVE_ARMv6_INSTRUCTIONS from machine/asm.h
 * here, but that header can't be included here because this is C
 * code.  I would like to move the _HAVE_ARMv6_INSTRUCTIONS definition
 * out of asm.h so it can be used in both asm and C code. - kientzle@
 */
#if defined (__ARM_ARCH_7__) || \
	defined (__ARM_ARCH_7A__) || \
	defined (__ARM_ARCH_6__) || \
	defined (__ARM_ARCH_6J__) || \
	defined (__ARM_ARCH_6K__) || \
	defined (__ARM_ARCH_6Z__) || \
	defined (__ARM_ARCH_6ZK__)
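/*
 * Full data memory barrier: the dedicated DMB instruction on ARMv7, or
 * the equivalent CP15 c7/c10 operation on ARMv6, which predates the
 * instruction form.
 */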
static __inline void
__do_dmb(void)
{

#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
	__asm __volatile("dmb" : : : "memory");
#else
	__asm __volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
#endif
}

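/*
 * The acquire variants perform the operation and then issue a barrier,
 * so later accesses cannot be reordered before the atomic op; the
 * release variants issue the barrier first, so earlier accesses
 * complete before the atomic op becomes visible.
 */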
#define ATOMIC_ACQ_REL_LONG(NAME)					\
static __inline void							\
atomic_##NAME##_acq_long(__volatile u_long *p, u_long v)		\
{									\
	atomic_##NAME##_long(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_long(__volatile u_long *p, u_long v)		\
{									\
	__do_dmb();							\
	atomic_##NAME##_long(p, v);					\
}

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	__do_dmb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}

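/*
 * The ARMv6+ primitives below all follow the same pattern: ldrex loads
 * the word and marks it for exclusive access, the new value is
 * computed, and strex stores it back, writing 0 to its status operand
 * on success.  A nonzero status means the exclusive reservation was
 * lost (e.g. to another CPU), so the sequence branches back and
 * retries.
 */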
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "memory");
}

static __inline void
atomic_set_long(volatile u_long *address, u_long setmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "memory");
}

static __inline void
atomic_clear_long(volatile u_long *address, u_long clearmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "memory");
}

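/*
 * Compare-and-swap: atomically replace *p with newval if it currently
 * equals cmpval.  Returns 1 on success and 0 if the comparison failed.
 */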
static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne 1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "memory");
	return (ret);
}

static __inline u_long
atomic_cmpset_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	u_long ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne 1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "memory");
	return (ret);
}

static __inline uint32_t
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t ret = atomic_cmpset_32(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline u_long
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	u_long ret = atomic_cmpset_long(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline uint32_t
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{

	__do_dmb();
	return (atomic_cmpset_32(p, cmpval, newval));
}

static __inline u_long
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{

	__do_dmb();
	return (atomic_cmpset_long(p, cmpval, newval));
}

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

static __inline void
atomic_add_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne 1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL_LONG(clear)
ATOMIC_ACQ_REL_LONG(add)
ATOMIC_ACQ_REL_LONG(subtract)
ATOMIC_ACQ_REL_LONG(set)

#undef ATOMIC_ACQ_REL
#undef ATOMIC_ACQ_REL_LONG

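/*
 * fetchadd atomically adds val to *p and returns the value *p held
 * before the addition.
 */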
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne 1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "memory");
	return (ret);
}

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "memory");
	return (ret);
}

static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{

	__do_dmb();
	*p = v;
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne 1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "memory");
	return (ret);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{
	u_long ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "memory");
	return (ret);
}

static __inline u_long
atomic_load_acq_long(volatile u_long *p)
{
	u_long v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_long(volatile u_long *p, u_long v)
{

	__do_dmb();
	*p = v;
}
#else /* < armv6 */

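/*
 * Pre-ARMv6 CPUs have no ldrex/strex, so atomicity is obtained
 * differently: the kernel (uniprocessor on these parts) briefly
 * disables IRQs and FIQs around the operation, while userland relies
 * on kernel-assisted restartable atomic sequences (see below).
 */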
#define __with_interrupts_disabled(expr)		\
	do {						\
		u_int cpsr_save, tmp;			\
							\
		__asm __volatile(			\
			"mrs  %0, cpsr;"		\
			"orr  %1, %0, %2;"		\
			"msr  cpsr_all, %1;"		\
			: "=r" (cpsr_save), "=r" (tmp)	\
			: "I" (I32_bit | F32_bit)	\
			: "cc" );			\
		(expr);					\
		__asm __volatile(			\
			"msr  cpsr_all, %0"		\
			: /* no output */		\
			: "r" (cpsr_save)		\
			: "cc" );			\
	} while (0)

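/*
 * swp atomically exchanges a register with a word in memory; it is the
 * only atomic read-modify-write primitive these older cores provide.
 * (The instruction is deprecated on ARMv6 and later.)
 */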
static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
	__asm __volatile("swp	%0, %2, [%3]"
	    : "=&r" (val), "=m" (*ptr)
	    : "r" (val), "r" (ptr), "m" (*ptr)
	    : "memory");
	return (val);
}

#ifdef _KERNEL
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	__with_interrupts_disabled(*address |= setmask);
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	__with_interrupts_disabled(*address &= ~clearmask);
}

static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	int ret;

	__with_interrupts_disabled(
	{
		if (*p == cmpval) {
			*p = newval;
			ret = 1;
		} else {
			ret = 0;
		}
	});
	return (ret);
}

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	__with_interrupts_disabled(*p += val);
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	__with_interrupts_disabled(*p -= val);
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t value;

	__with_interrupts_disabled(
	{
		value = *p;
		*p += v;
	});
	return (value);
}

#else /* !_KERNEL */

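/*
 * Userland restartable atomic sequences: before the load/modify/store,
 * the sequence's start and end addresses are stored at ARM_RAS_START
 * and the word after it; afterwards the registration is cleared by
 * writing 0 and 0xffffffff.  If the thread is preempted inside the
 * registered range, the kernel restarts it at the beginning, so the
 * sequence executes atomically with respect to preemption.
 */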
static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	register int done, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "cmp	%1, %3\n"
	    "streq	%4, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    "moveq	%1, #1\n"
	    "movne	%1, #0\n"
	    : "+r" (ras_start), "=r" (done)
	    , "+r" (p), "+r" (cmpval), "+r" (newval) : : "memory");
	return (done);
}

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "add	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "sub	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "orr	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask)
	    : : "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "bic	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask)
	    : : "memory");
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t start, tmp, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%3]\n"
	    "mov	%2, %1\n"
	    "add	%2, %2, %4\n"
	    "str	%2, [%3]\n"
	    "2:\n"
	    "mov	%2, #0\n"
	    "str	%2, [%0]\n"
	    "mov	%2, #0xffffffff\n"
	    "str	%2, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "=r" (tmp), "+r" (p), "+r" (v)
	    : : "memory");
	return (start);
}

#endif /* _KERNEL */

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{

	return (__swp(0, p));
}

#define atomic_cmpset_rel_32	atomic_cmpset_32
#define atomic_cmpset_acq_32	atomic_cmpset_32
#define atomic_set_rel_32	atomic_set_32
#define atomic_set_acq_32	atomic_set_32
#define atomic_clear_rel_32	atomic_clear_32
#define atomic_clear_acq_32	atomic_clear_32
#define atomic_add_rel_32	atomic_add_32
#define atomic_add_acq_32	atomic_add_32
#define atomic_subtract_rel_32	atomic_subtract_32
#define atomic_subtract_acq_32	atomic_subtract_32
#define atomic_store_rel_32	atomic_store_32
#define atomic_store_rel_long	atomic_store_long
#define atomic_load_acq_32	atomic_load_32
#define atomic_load_acq_long	atomic_load_long
#undef __with_interrupts_disabled

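/*
 * On 32-bit ARM, long is 32 bits wide, so the long variants can simply
 * delegate to their 32-bit counterparts.
 */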
static __inline void
atomic_add_long(volatile u_long *p, u_long v)
{

	atomic_add_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_clear_long(volatile u_long *p, u_long v)
{

	atomic_clear_32((volatile uint32_t *)p, v);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
{

	return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_32((volatile uint32_t *)p, v));
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{

	return (atomic_readandclear_32((volatile uint32_t *)p));
}

static __inline void
atomic_set_long(volatile u_long *p, u_long v)
{

	atomic_set_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long v)
{

	atomic_subtract_32((volatile uint32_t *)p, v);
}

/*
 * Pre-v6 acquire/release variants need no barriers, so they map
 * straight to the plain operations.  (The ARMv6+ branch above provides
 * real inline functions for these names, so the macros must stay in
 * this branch to avoid shadowing them.)
 */
#define atomic_add_acq_long		atomic_add_long
#define atomic_add_rel_long		atomic_add_long
#define atomic_subtract_acq_long	atomic_subtract_long
#define atomic_subtract_rel_long	atomic_subtract_long
#define atomic_clear_acq_long		atomic_clear_long
#define atomic_clear_rel_long		atomic_clear_long
#define atomic_set_acq_long		atomic_set_long
#define atomic_set_rel_long		atomic_set_long
#define atomic_cmpset_acq_long		atomic_cmpset_long
#define atomic_cmpset_rel_long		atomic_cmpset_long

#endif /* Arch >= v6 */

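/*
 * Plain aligned 32-bit loads and stores are single-copy atomic on ARM,
 * so the unordered load/store operations need no special sequence.
 */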
static __inline uint32_t
atomic_load_32(volatile uint32_t *v)
{

	return (*v);
}

static __inline void
atomic_store_32(volatile uint32_t *dst, uint32_t src)
{
	*dst = src;
}

static __inline u_long
atomic_load_long(volatile u_long *v)
{

	return (*v);
}

static __inline void
atomic_store_long(volatile u_long *dst, u_long src)
{
	*dst = src;
}

#define atomic_clear_ptr		atomic_clear_32
#define atomic_set_ptr			atomic_set_32
#define atomic_cmpset_ptr		atomic_cmpset_32
#define atomic_cmpset_rel_ptr		atomic_cmpset_rel_32
#define atomic_cmpset_acq_ptr		atomic_cmpset_acq_32
#define atomic_store_ptr		atomic_store_32
#define atomic_store_rel_ptr		atomic_store_rel_32

#define atomic_add_int			atomic_add_32
#define atomic_add_acq_int		atomic_add_acq_32
#define atomic_add_rel_int		atomic_add_rel_32
#define atomic_subtract_int		atomic_subtract_32
#define atomic_subtract_acq_int		atomic_subtract_acq_32
#define atomic_subtract_rel_int		atomic_subtract_rel_32
#define atomic_clear_int		atomic_clear_32
#define atomic_clear_acq_int		atomic_clear_acq_32
#define atomic_clear_rel_int		atomic_clear_rel_32
#define atomic_set_int			atomic_set_32
#define atomic_set_acq_int		atomic_set_acq_32
#define atomic_set_rel_int		atomic_set_rel_32
#define atomic_cmpset_int		atomic_cmpset_32
#define atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define atomic_fetchadd_int		atomic_fetchadd_32
#define atomic_readandclear_int		atomic_readandclear_32
#define atomic_load_acq_int		atomic_load_acq_32
#define atomic_store_rel_int		atomic_store_rel_32

#endif /* _MACHINE_ATOMIC_H_ */