#ifndef _ASM_X86_LOCAL_H
#define _ASM_X86_LOCAL_H

#include <linux/percpu.h>

#include <linux/atomic.h>
#include <asm/asm.h>

typedef struct {
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))

static inline void local_inc(local_t *l)
{
	asm volatile(_ASM_INC "%0"
		     : "+m" (l->a.counter));
}

static inline void local_dec(local_t *l)
{
	asm volatile(_ASM_DEC "%0"
		     : "+m" (l->a.counter));
}

static inline void local_add(long i, local_t *l)
{
	asm volatile(_ASM_ADD "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}

static inline void local_sub(long i, local_t *l)
{
	asm volatile(_ASM_SUB "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int local_sub_and_test(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_SUB "%2,%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int local_dec_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_DEC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int local_inc_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_INC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
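/*
 * Usage sketch (illustrative only, not part of the interface above):
 * local_t is intended for per-CPU statistics that are only ever modified
 * by their owning CPU, so the ops above stay atomic with respect to local
 * interrupts without needing a LOCK prefix.  A minimal pattern, assuming a
 * hypothetical per-CPU counter named "pkt_count":
 *
 *	static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *	static void count_packet(void)		// updater: owning CPU only
 *	{
 *		local_inc(this_cpu_ptr(&pkt_count));
 *	}
 *
 *	static long total_packets(void)		// reader: may run anywhere
 *	{
 *		long sum = 0;
 *		int cpu;
 *
 *		for_each_possible_cpu(cpu)
 *			sum += local_read(per_cpu_ptr(&pkt_count, cpu));
 *		return sum;
 *	}
 */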
/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int local_add_negative(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_ADD "%2,%0; sets %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
	long __i = i;
	asm volatile(_ASM_XADD "%0, %1;"
		     : "+r" (i), "+m" (l->a.counter)
		     : : "memory");
	return i + __i;
}

static inline long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i, l);
}

#define local_inc_return(l)	(local_add_return(1, l))
#define local_dec_return(l)	(local_sub_return(1, l))

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read((l));					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

/* On x86_32, these are no better than the atomic variants.
 * On x86-64 these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i, l)	local_add((i), (l))
#define __local_sub(i, l)	local_sub((i), (l))

#endif /* _ASM_X86_LOCAL_H */
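/*
 * Another illustrative sketch (hypothetical names, not part of this header):
 * local_add_return()/local_inc_return() are useful for per-CPU sequence
 * numbers, since the single XADD instruction is atomic with respect to
 * interrupts and NMIs on the owning CPU:
 *
 *	static DEFINE_PER_CPU(local_t, seq) = LOCAL_INIT(0);
 *
 *	static long next_seq(void)		// must run on the owning CPU
 *	{
 *		return local_inc_return(this_cpu_ptr(&seq));
 *	}
 */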