/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2017 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef	_LINUXKPI_ASM_ATOMIC_LONG_H_
#define	_LINUXKPI_ASM_ATOMIC_LONG_H_

#include <linux/compiler.h>
#include <sys/types.h>
#include <machine/atomic.h>
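
/*
 * LinuxKPI emulation of the Linux atomic_long_t type and operations,
 * implemented on top of the native FreeBSD atomic(9) primitives.
 */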
#define	ATOMIC_LONG_INIT(x)	{ .counter = (x) }

typedef struct {
	volatile long counter;
} atomic_long_t;

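/*
 * Convenience wrappers expressed in terms of the functions defined below;
 * atomic_long_inc_not_zero() increments the counter only while it is
 * non-zero and reports whether the increment took place.
 */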
#define	atomic_long_add(i, v)		atomic_long_add_return((i), (v))
#define	atomic_long_sub(i, v)		atomic_long_sub_return((i), (v))
#define	atomic_long_inc_return(v)	atomic_long_add_return(1, (v))
#define	atomic_long_inc_not_zero(v)	atomic_long_add_unless((v), 1, 0)

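/* Atomically add "i" to the counter and return the resulting value. */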
static inline long
atomic_long_add_return(long i, atomic_long_t *v)
{
	return i + atomic_fetchadd_long(&v->counter, i);
}

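/* Atomically subtract "i" from the counter and return the resulting value. */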
static inline long
atomic_long_sub_return(long i, atomic_long_t *v)
{
	return atomic_fetchadd_long(&v->counter, -i) - i;
}

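/*
 * Plain store/load of the counter; WRITE_ONCE() and READ_ONCE() only keep
 * the compiler from tearing or eliding the single access, no memory
 * ordering is implied.
 */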
static inline void
atomic_long_set(atomic_long_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}

static inline long
atomic_long_read(atomic_long_t *v)
{
	return READ_ONCE(v->counter);
}

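/* Atomically increment or decrement the counter and return the new value. */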
static inline long
atomic_long_inc(atomic_long_t *v)
{
	return atomic_fetchadd_long(&v->counter, 1) + 1;
}

static inline long
atomic_long_dec(atomic_long_t *v)
{
	return atomic_fetchadd_long(&v->counter, -1) - 1;
}

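/* Atomically replace the counter with "val" and return the previous value. */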
static inline long
atomic_long_xchg(atomic_long_t *v, long val)
{
	return atomic_swap_long(&v->counter, val);
}

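/*
 * Atomically set the counter to "new" if it currently equals "old" and
 * return the value observed, mirroring the Linux cmpxchg() contract.  On
 * failure atomic_fcmpset_long() reloads the observed value into "ret", so
 * the loop only retries after a spurious failure (observed value still
 * equal to "old").
 */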
static inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	long ret = old;

	for (;;) {
		if (atomic_fcmpset_long(&v->counter, &ret, new))
			break;
		if (ret != old)
			break;
	}
	return (ret);
}

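/*
 * Atomically add "a" to the counter unless it currently equals "u";
 * returns non-zero when the addition was performed.
 */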
static inline int
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	long c = atomic_long_read(v);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c != u);
}

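/*
 * Same as atomic_long_add_unless(), but returns the counter value that was
 * observed before the (possible) addition.
 */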
static inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	long c = atomic_long_read(v);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c);
}

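/* Atomically decrement the counter and return non-zero when it reaches zero. */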
static inline long
atomic_long_dec_and_test(atomic_long_t *v)
{
	long i = atomic_long_add(-1, v);
	return i == 0;
}

#endif	/* _LINUXKPI_ASM_ATOMIC_LONG_H_ */