/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%c0"
	    : : "i" (__OFFSETOF_MONITORBUF) : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif
/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
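
/*
 * Illustrative sketch (hypothetical example, not part of this header):
 * a "flags" word shared with an interrupt handler could be manipulated
 * with these operations instead of plain |=, &=, += assignments:
 *
 *	static volatile u_int flags;
 *
 *	atomic_set_int(&flags, 0x01);		flags |= 0x01, atomically
 *	atomic_clear_int(&flags, 0x01);		flags &= ~0x01, atomically
 *	atomic_add_int(&flags, 1);		flags += 1, atomically
 */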

/*
 * Always use lock prefixes.  The result is slightly less optimal for
 * UP systems, but it matters less now, and sometimes UP is emulated
 * over SMP.
 *
 * The assembly is volatilized to avoid code chunk removal by the
 * compiler.  GCC aggressively reorders operations and memory clobbering
 * is necessary in order to avoid that for memory barriers.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg %3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg %3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");
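
/*
 * Illustrative sketch (hypothetical example, not part of this header):
 * fcmpset is convenient for read-modify-write loops because it reloads
 * the current value into 'old' on failure, avoiding an explicit reread:
 *
 *	volatile u_int *p;
 *	u_int old, new;
 *
 *	old = *p;
 *	do {
 *		new = old | 0x01;
 *	} while (!atomic_fcmpset_int(p, &old, new));
 */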

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	lock; xaddl	%0,%1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}
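
/*
 * Illustrative sketch (hypothetical example, not part of this header):
 * because fetchadd returns the value from before the addition, it can
 * hand out unique, monotonically increasing ticket numbers:
 *
 *	static volatile u_int next_ticket;
 *
 *	my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 */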

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btsl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btrl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
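
/*
 * Illustrative sketch (hypothetical example, not part of this header):
 * the return value is the previous state of the bit, so a caller can
 * tell whether it was the one that actually set it, e.g. to avoid
 * queueing duplicate work:
 *
 *	if (atomic_testandset_int(&pending, 3) == 0)
 *		bit 3 was clear before; enqueue the work item
 */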

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

#if defined(_KERNEL)
#define	__storeload_barrier()	__mbk()
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(const volatile u_##TYPE *p)		\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
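
/*
 * Illustrative sketch (hypothetical example, not part of this header):
 * a writer publishes data with a release store and a reader observes
 * the flag with an acquire load; once the reader sees ready != 0, it is
 * guaranteed to see the data written before the release store:
 *
 *	writer:	data = 42;
 *		atomic_store_rel_int(&ready, 1);
 *
 *	reader:	while (atomic_load_acq_int(&ready) == 0)
 *			;
 *		data may now be read safely
 */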

#ifdef _KERNEL

/* I486 does not support SMP or CMPXCHG8B. */
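/*
 * The _i386 variants below therefore make the 64-bit access atomic by
 * briefly disabling interrupts (pushfl; cli ... popfl) around ordinary
 * 32-bit moves, which suffices only because such CPUs cannot be part
 * of an SMP system.
 */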
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if (atomic_cmpset_64_i386(dst, *expect, src)) {
		return (1);
	} else {
		*expect = *dst;
		return (0);
	}
}

static __inline uint64_t
atomic_load_acq_64_i386(const volatile uint64_t *p)
{
	const volatile uint32_t *q;
	uint64_t res;

	q = (const volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (*expect)		/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

/*
 * Architecturally, cmpxchg8b always writes back some value to '*p', so
 * this will trigger a #GP(0) on read-only mappings.
 */
static __inline uint64_t
atomic_load_acq_64_i586(const volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	lock; cmpxchg8b %1"
	: "=&A" (res)			/* 0 */
	: "m" (*p)			/* 1 */
	: "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_fcmpset_64_i386(dst, expect, src));
	else
		return (atomic_fcmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(const volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

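/*
 * The remaining 64-bit read-modify-write operations are emulated with
 * compare-and-set loops: read the old value, compute the new one, and
 * retry until atomic_cmpset_64() succeeds.
 */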
static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}

#endif /* _KERNEL */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq", v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq", v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq", v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir", v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir", v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir", v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir", v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
#define	atomic_testandset_acq_long	atomic_testandset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

#ifdef _KERNEL
/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64	atomic_cmpset_64
#define	atomic_cmpset_rel_64	atomic_cmpset_64
#define	atomic_fcmpset_acq_64	atomic_fcmpset_64
#define	atomic_fcmpset_rel_64	atomic_fcmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64	atomic_add_64
#define	atomic_add_rel_64	atomic_add_64
#define	atomic_subtract_acq_64	atomic_subtract_64
#define	atomic_subtract_rel_64	atomic_subtract_64
#define	atomic_load_64		atomic_load_acq_64
#define	atomic_store_64		atomic_store_rel_64
#endif

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((const volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
#define	atomic_testandclear_ptr(p, val) \
	atomic_testandclear_int((volatile u_int *)(p), (val))
#define	atomic_testandset_ptr(p, val) \
	atomic_testandset_int((volatile u_int *)(p), (val))

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */