// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2007-2010 Lawrence Livermore National Security, LLC.
 * Copyright (C) 2007 The Regents of the University of California.
 * Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
 * Written by Brian Behlendorf <behlendorf1@llnl.gov>.
 * UCRL-CODE-235197
 *
 * This file is part of the SPL, Solaris Porting Layer.
 *
 * The SPL is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * The SPL is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the SPL.  If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef _SPL_ATOMIC_H
#define _SPL_ATOMIC_H

#include <linux/module.h>
#include <linux/spinlock.h>
#include <sys/types.h>
/*
 * Map the atomic_* functions to their Linux counterparts.  This relies on
 * the fact that the atomic types are internally really a uint32_t or
 * uint64_t.  If this were to change, an alternate approach would be needed.
 *
 * N.B. Due to the limitations of the original API, atomicity is not
 * strictly preserved when using the 64-bit functions on a 32-bit system.
 * To support this properly, all consumers would need to be updated to use
 * the Linux-provided atomic_t and atomic64_t types.
 */
#define	atomic_inc_32(v)	atomic_inc((atomic_t *)(v))
#define	atomic_dec_32(v)	atomic_dec((atomic_t *)(v))
#define	atomic_add_32(v, i)	atomic_add((i), (atomic_t *)(v))
#define	atomic_sub_32(v, i)	atomic_sub((i), (atomic_t *)(v))
#define	atomic_inc_32_nv(v)	atomic_inc_return((atomic_t *)(v))
#define	atomic_dec_32_nv(v)	atomic_dec_return((atomic_t *)(v))
#define	atomic_add_32_nv(v, i)	atomic_add_return((i), (atomic_t *)(v))
#define	atomic_sub_32_nv(v, i)	atomic_sub_return((i), (atomic_t *)(v))
#define	atomic_cas_32(v, x, y)	atomic_cmpxchg((atomic_t *)(v), x, y)
#define	atomic_swap_32(v, x)	atomic_xchg((atomic_t *)(v), x)
#define	atomic_load_32(v)	atomic_read((atomic_t *)(v))
#define	atomic_store_32(v, x)	atomic_set((atomic_t *)(v), x)
#define	atomic_inc_64(v)	atomic64_inc((atomic64_t *)(v))
#define	atomic_dec_64(v)	atomic64_dec((atomic64_t *)(v))
#define	atomic_add_64(v, i)	atomic64_add((i), (atomic64_t *)(v))
#define	atomic_sub_64(v, i)	atomic64_sub((i), (atomic64_t *)(v))
#define	atomic_inc_64_nv(v)	atomic64_inc_return((atomic64_t *)(v))
#define	atomic_dec_64_nv(v)	atomic64_dec_return((atomic64_t *)(v))
#define	atomic_add_64_nv(v, i)	atomic64_add_return((i), (atomic64_t *)(v))
#define	atomic_sub_64_nv(v, i)	atomic64_sub_return((i), (atomic64_t *)(v))
#define	atomic_cas_64(v, x, y)	atomic64_cmpxchg((atomic64_t *)(v), x, y)
#define	atomic_swap_64(v, x)	atomic64_xchg((atomic64_t *)(v), x)
#define	atomic_load_64(v)	atomic64_read((atomic64_t *)(v))
#define	atomic_store_64(v, x)	atomic64_set((atomic64_t *)(v), x)
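
/*
 * Usage sketch (illustrative only; the `refcount' variable and cleanup()
 * call below are hypothetical consumer code, not part of this header).
 * The wrappers above are applied directly to plain integer storage, and
 * the _nv variants return the new value after the operation:
 *
 *	volatile uint32_t refcount = 0;
 *
 *	atomic_inc_32(&refcount);
 *	if (atomic_dec_32_nv(&refcount) == 0)
 *		cleanup();
 *
 * The casts reinterpret the uint32_t as a Linux atomic_t, which is only
 * valid while the two types share the same size and representation, as
 * noted in the comment above.
 */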

#ifdef _LP64
static __inline__ void *
atomic_cas_ptr(volatile void *target, void *cmp, void *newval)
{
	return ((void *)atomic_cas_64((volatile uint64_t *)target,
	    (uint64_t)cmp, (uint64_t)newval));
}
#else /* _LP64 */
static __inline__ void *
atomic_cas_ptr(volatile void *target, void *cmp, void *newval)
{
	return ((void *)atomic_cas_32((volatile uint32_t *)target,
	    (uint32_t)cmp, (uint32_t)newval));
}
#endif /* _LP64 */
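
/*
 * Usage sketch for atomic_cas_ptr() (illustrative only; `ptr', `old',
 * `newp', and swap_succeeded() below are hypothetical consumer code).
 * As in the Solaris original, the function returns the value previously
 * stored in *target, so a successful swap is detected by comparing the
 * return value with the expected old value:
 *
 *	void *volatile ptr;
 *	void *old, *newp;
 *	...
 *	old = ptr;
 *	if (atomic_cas_ptr(&ptr, old, newp) == old)
 *		swap_succeeded();
 *
 * On _LP64 builds this routes to atomic_cas_64(); otherwise pointers are
 * assumed to be 32 bits wide and atomic_cas_32() is used.
 */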

#endif /* _SPL_ATOMIC_H */