xref: /freebsd/sys/compat/linuxkpi/common/include/asm/atomic-long.h (revision d5b0e70f7e04d971691517ce1304d86a1e367e2e)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * Copyright (c) 2013-2017 Mellanox Technologies, Ltd.
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  * 1. Redistributions of source code must retain the above copyright
12  *    notice unmodified, this list of conditions, and the following
13  *    disclaimer.
14  * 2. Redistributions in binary form must reproduce the above copyright
15  *    notice, this list of conditions and the following disclaimer in the
16  *    documentation and/or other materials provided with the distribution.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  *
29  * $FreeBSD$
30  */
31 #ifndef	_LINUXKPI_ASM_ATOMIC_LONG_H_
32 #define	_LINUXKPI_ASM_ATOMIC_LONG_H_
33 
34 #include <linux/compiler.h>
35 #include <sys/types.h>
36 #include <machine/atomic.h>
/* Static initializer for an atomic_long_t, e.g. ATOMIC_LONG_INIT(0). */
#define	ATOMIC_LONG_INIT(x)	{ .counter = (x) }

/*
 * Emulation of the Linux atomic_long_t type: a plain long counter
 * manipulated via the FreeBSD atomic(9) long operations below.  The
 * "volatile" qualifier forces the compiler to perform a real memory
 * access on every read/write of the counter.
 */
typedef struct {
	volatile long counter;
} atomic_long_t;
42 
/*
 * Convenience wrappers expressed in terms of the inline primitives
 * defined below.  Note that atomic_long_add()/atomic_long_sub() map to
 * the *_return variant, so they also yield the new value; callers that
 * expect the void-returning Linux form can simply ignore the result.
 */
#define	atomic_long_add(i, v)		atomic_long_add_return((i), (v))
#define	atomic_long_sub(i, v)		atomic_long_add_return(-(i), (v))
#define	atomic_long_inc_return(v)	atomic_long_add_return(1, (v))
#define	atomic_long_inc_not_zero(v)	atomic_long_add_unless((v), 1, 0)
47 
48 static inline long
49 atomic_long_add_return(long i, atomic_long_t *v)
50 {
51 	return i + atomic_fetchadd_long(&v->counter, i);
52 }
53 
/*
 * Store "i" into *v.  WRITE_ONCE() (from <linux/compiler.h>) keeps the
 * compiler from tearing or eliding the store; no memory barrier beyond
 * that is implied.
 */
static inline void
atomic_long_set(atomic_long_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
59 
/*
 * Load and return the current value of *v.  READ_ONCE() keeps the
 * compiler from fusing or eliding the load; no memory barrier beyond
 * that is implied.
 */
static inline long
atomic_long_read(atomic_long_t *v)
{
	return READ_ONCE(v->counter);
}
65 
66 static inline long
67 atomic_long_inc(atomic_long_t *v)
68 {
69 	return atomic_fetchadd_long(&v->counter, 1) + 1;
70 }
71 
72 static inline long
73 atomic_long_dec(atomic_long_t *v)
74 {
75 	return atomic_fetchadd_long(&v->counter, -1) - 1;
76 }
77 
78 static inline long
79 atomic_long_xchg(atomic_long_t *v, long val)
80 {
81 	return atomic_swap_long(&v->counter, val);
82 }
83 
/*
 * If *v equals "old", atomically replace it with "new".  Returns the
 * value observed in *v before the operation: "old" on success, the
 * conflicting value on failure (Linux cmpxchg() semantics).
 */
static inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	long ret = old;

	for (;;) {
		/* On failure, fcmpset stores the observed value in "ret". */
		if (atomic_fcmpset_long(&v->counter, &ret, new))
			break;
		/*
		 * Genuine mismatch: report the conflicting value.  Otherwise
		 * the failure was spurious ("ret" still equals "old"), so
		 * retry the compare-and-set.
		 */
		if (ret != old)
			break;
	}
	return (ret);
}
97 
/*
 * Atomically add "a" to *v unless *v equals "u".  Returns non-zero if
 * the addition was performed, zero if *v already held "u".
 */
static inline int
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	long c = atomic_long_read(v);

	for (;;) {
		/* Bail out without modifying *v once it reaches "u". */
		if (unlikely(c == u))
			break;
		/* On failure, fcmpset refreshes "c" with the current value. */
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c != u);
}
111 
/*
 * Atomically add "a" to *v unless *v equals "u".  Returns the value of
 * *v prior to the (possible) addition; callers can compare the result
 * against "u" to learn whether the add took place.
 */
static inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	long c = atomic_long_read(v);

	for (;;) {
		/* Bail out without modifying *v once it reaches "u". */
		if (unlikely(c == u))
			break;
		/* On failure, fcmpset refreshes "c" with the current value. */
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c);
}
125 
126 static inline long
127 atomic_long_dec_and_test(atomic_long_t *v)
128 {
129 	long i = atomic_long_add(-1, v);
130 	return i == 0 ;
131 }
132 
133 #endif	/* _LINUXKPI_ASM_ATOMIC_LONG_H_ */
134