Lines matching "3" and "v": excerpted ARM atomic operation implementations (LDREX/STREX)
#define arch_atomic_read(v)	READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
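
These two are the only operations that need no special instructions: a naturally aligned 32-bit load or store on ARM is already single-copy atomic, so READ_ONCE()/WRITE_ONCE() only have to keep the compiler honest. A minimal sketch of what they reduce to for a plain int, assuming the generic volatile-cast definitions (names are illustrative):

/* Sketch: the volatile cast forces exactly one untorn access and stops
 * the compiler from caching, tearing, or re-reading the value. */
static inline int atomic_read_sketch(const int *counter)
{
	return *(const volatile int *)counter;
}

static inline void atomic_set_sketch(int *counter, int i)
{
	*(volatile int *)counter = i;
}
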
/* ATOMIC_OP(): non-returning atomic ops, built on an ldrex/strex retry loop */
static inline void arch_atomic_##op(int i, atomic_t *v)		\
	prefetchw(&v->counter);						\
"1:	ldrex	%0, [%3]\n"						\
"	strex	%1, %0, [%3]\n"						\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\
/* ATOMIC_OP_RETURN(): same loop, but returns the updated value */
static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
	prefetchw(&v->counter);						\
"1:	ldrex	%0, [%3]\n"						\
"	strex	%1, %0, [%3]\n"						\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\
/* ATOMIC_FETCH_OP(): same loop, but returns the value seen before the op */
static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v)	\
	prefetchw(&v->counter);						\
	: "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "Ir" (i)					\
115 "ldrex %1, [%3]\n" in arch_atomic_cmpxchg_relaxed()
118 "strexeq %0, %5, [%3]\n" in arch_atomic_cmpxchg_relaxed()
/* arch_atomic_fetch_add_unless(): add "a" unless the counter equals "u" */
static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
	prefetchw(&v->counter);
	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)
/* Pre-ARMv6 UP fallbacks: plain C ops inside an IRQ-disabled critical section */
static inline void arch_atomic_##op(int i, atomic_t *v)		\
	v->counter c_op i;						\

static inline int arch_atomic_##op##_return(int i, atomic_t *v)	\
	v->counter c_op i;						\
	val = v->counter;						\

static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
	val = v->counter;						\
	v->counter c_op i;						\
/* arch_atomic_cmpxchg(), UP fallback */
static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
	ret = v->counter;
	v->counter = new;
/* arch_atomic64_read()/arch_atomic64_set(): ldrd/strd variant, for parts where
 * doubleword accesses are single-copy atomic (LPAE) */
static inline s64 arch_atomic64_read(const atomic64_t *v)
	: "r" (&v->counter), "Qo" (v->counter)

static inline void arch_atomic64_set(atomic64_t *v, s64 i)
	: "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
/* arch_atomic64_read()/arch_atomic64_set(): ldrexd/strexd variant for parts
 * without single-copy-atomic 64-bit stores */
static inline s64 arch_atomic64_read(const atomic64_t *v)
	: "r" (&v->counter), "Qo" (v->counter)

static inline void arch_atomic64_set(atomic64_t *v, s64 i)
	prefetchw(&v->counter);
"	strexd	%0, %3, %H3, [%2]\n"
	: "=&r" (tmp), "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
/* ATOMIC64_OP(): 64-bit non-returning ops; %H0 is the high half of the pair */
static inline void arch_atomic64_##op(s64 i, atomic64_t *v)		\
	prefetchw(&v->counter);						\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	strexd	%1, %0, %H0, [%3]\n"					\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
/* ATOMIC64_OP_RETURN(): returns the updated 64-bit value */
arch_atomic64_##op##_return_relaxed(s64 i, atomic64_t *v)		\
	prefetchw(&v->counter);						\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	strexd	%1, %0, %H0, [%3]\n"					\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
/* ATOMIC64_FETCH_OP(): returns the pre-op 64-bit value */
arch_atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v)		\
	prefetchw(&v->counter);						\
	: "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "r" (i)					\
420 "ldrexd %1, %H1, [%3]\n" in ATOMIC64_OPS()
424 "strexdeq %0, %5, %H5, [%3]" in ATOMIC64_OPS()
442 "1: ldrexd %0, %H0, [%3]\n" in arch_atomic64_xchg_relaxed()
443 " strexd %1, %4, %H4, [%3]\n" in arch_atomic64_xchg_relaxed()
/* arch_atomic64_dec_if_positive(): decrements, but bails out before the store
 * if the result went negative */
static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
	prefetchw(&v->counter);
"1:	ldrexd	%0, %H0, [%3]\n"
"	strexd	%1, %0, %H0, [%3]\n"
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter)
/* arch_atomic64_fetch_add_unless(): 64-bit version of the conditional add */
static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
	prefetchw(&v->counter);
	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)