/*-
 * Copyright (c) 2016 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef	_ASM_ATOMIC64_H_
#define	_ASM_ATOMIC64_H_

#include <sys/cdefs.h>
#include <sys/types.h>
#include <machine/atomic.h>

#include <linux/compiler.h>	/* likely() and unlikely() */

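/*
 * Linux-style 64-bit atomic counter. The volatile qualifier keeps the
 * compiler from caching plain accesses; atomicity and ordering come
 * from the machine/atomic.h primitives used below.
 */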
typedef struct {
	volatile int64_t counter;
} atomic64_t;

/*------------------------------------------------------------------------*
 *	64-bit atomic operations
 *------------------------------------------------------------------------*/

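/*
 * Derived operations, expressed in terms of the value-returning
 * helpers below. Note that atomic64_add() and atomic64_sub() also
 * yield the new counter value here, which callers are free to ignore.
 */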
#define	atomic64_add(i, v)		atomic64_add_return((i), (v))
#define	atomic64_sub(i, v)		atomic64_sub_return((i), (v))
#define	atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define	atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)
#define	atomic64_add_and_test(i, v)	(atomic64_add_return((i), (v)) == 0)
#define	atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
#define	atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define	atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)
#define	atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define	atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

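/*
 * Atomically add i to the counter and return the resulting value.
 */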
static inline int64_t
atomic64_add_return(int64_t i, atomic64_t *v)
{
	return (i + atomic_fetchadd_64(&v->counter, i));
}

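/*
 * Atomically subtract i from the counter and return the resulting value.
 */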
static inline int64_t
atomic64_sub_return(int64_t i, atomic64_t *v)
{
	return (atomic_fetchadd_64(&v->counter, -i) - i);
}

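/*
 * Store i into the counter with release semantics.
 */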
static inline void
atomic64_set(atomic64_t *v, int64_t i)
{
	atomic_store_rel_64(&v->counter, i);
}

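/*
 * Load the counter with acquire semantics.
 */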
static inline int64_t
atomic64_read(atomic64_t *v)
{
	return (atomic_load_acq_64(&v->counter));
}

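/*
 * Atomically increment the counter and return the new value.
 */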
static inline int64_t
atomic64_inc(atomic64_t *v)
{
	return (atomic_fetchadd_64(&v->counter, 1) + 1);
}

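/*
 * Atomically decrement the counter and return the new value.
 */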
static inline int64_t
atomic64_dec(atomic64_t *v)
{
	return (atomic_fetchadd_64(&v->counter, -1) - 1);
}

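/*
 * Atomically add a to the counter unless its current value is u.
 * Implemented as a read/compare-and-set loop; returns non-zero if
 * the addition was performed.
 */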
static inline int64_t
atomic64_add_unless(atomic64_t *v, int64_t a, int64_t u)
{
	int64_t c;

	for (;;) {
		c = atomic64_read(v);
		if (unlikely(c == u))
			break;
		if (likely(atomic_cmpset_64(&v->counter, c, c + a)))
			break;
	}
	return (c != u);
}

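/*
 * Atomically exchange the counter with i and return the previous
 * value. Architectures without a native 64-bit swap fall back to a
 * compare-and-set loop.
 */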
static inline int64_t
atomic64_xchg(atomic64_t *v, int64_t i)
{
#if defined(__i386__) || defined(__amd64__) || \
    defined(__arm__) || defined(__aarch64__)
	return (atomic_swap_64(&v->counter, i));
#else
	int64_t ret;

	for (;;) {
		ret = atomic_load_acq_64(&v->counter);
		if (atomic_cmpset_64(&v->counter, ret, i))
			break;
	}
	return (ret);
#endif
}

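/*
 * Atomically set the counter to new if it currently equals old.
 * Returns the value observed: old on success, a different value on
 * failure. If the compare-and-set fails but the counter is then
 * observed to hold old again, the operation is retried.
 */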
static inline int64_t
atomic64_cmpxchg(atomic64_t *v, int64_t old, int64_t new)
{
	int64_t ret = old;

	for (;;) {
		if (atomic_cmpset_64(&v->counter, old, new))
			break;
		ret = atomic_load_acq_64(&v->counter);
		if (ret != old)
			break;
	}
	return (ret);
}

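/*
 * Illustrative sketch of a reference count built on the primitives
 * above; release_object() is a hypothetical caller-supplied routine:
 *
 *	static atomic64_t refcount;
 *
 *	atomic64_set(&refcount, 1);
 *	atomic64_inc(&refcount);
 *	...
 *	if (atomic64_dec_and_test(&refcount))
 *		release_object();
 */
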
#endif					/* _ASM_ATOMIC64_H_ */