Lines Matching +full:adc +full:- +full:use +full:- +full:res
1 /* SPDX-License-Identifier: GPL-2.0-only */
22 * strex/ldrex monitor on some implementations. The reason we can use it for
25 #define arch_atomic_read(v) READ_ONCE((v)->counter)
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
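These matches appear to come from the 32-bit ARM atomic-ops header (arch/arm/include/asm/atomic.h, GPL-2.0-only). The two defines just above are the whole story for plain reads and writes: an aligned 32-bit load or store is already single-copy atomic on ARM, so READ_ONCE()/WRITE_ONCE() on v->counter suffice, and the truncated comment is the note explaining why a plain store is safe even though it does not clear the ldrex/strex exclusive monitor on some implementations. A minimal sketch of what the two defines amount to once expanded (the my_ names are invented here for illustration):

/* Sketch only: plain volatile accesses, nothing architecture-specific. */
static inline int my_atomic_read(const atomic_t *v)
{
	return READ_ONCE(v->counter);	/* compiles to a plain ldr */
}

static inline void my_atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);	/* compiles to a plain str */
}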
31 * ARMv6 UP and SMP safe atomic ops. We use load exclusive and
42 prefetchw(&v->counter); \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
60 prefetchw(&v->counter); \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
81 prefetchw(&v->counter); \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
90 : "r" (&v->counter), "Ir" (i) \
109 unsigned long res; in arch_atomic_cmpxchg_relaxed() local
111 prefetchw(&ptr->counter); in arch_atomic_cmpxchg_relaxed()
119 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in arch_atomic_cmpxchg_relaxed()
120 : "r" (&ptr->counter), "Ir" (old), "r" (new) in arch_atomic_cmpxchg_relaxed()
122 } while (res); in arch_atomic_cmpxchg_relaxed()
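Those fragments are the compare-and-swap loop: strexeq only attempts the store when the loaded value matched old, and the C do/while retries whenever the store-exclusive reports failure. Filled in, it reads roughly:

static inline int arch_atomic_cmpxchg_relaxed_sketch(atomic_t *ptr, int old, int new)
{
	int oldval;
	unsigned long res;

	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic_cmpxchg\n"
		"ldrex	%1, [%3]\n"		/* oldval = *ptr            */
		"mov	%0, #0\n"		/* assume success           */
		"teq	%1, %4\n"		/* equal to 'old'?          */
		"strexeq %0, %5, [%3]\n"	/* then try to store 'new'  */
		: "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		: "r" (&ptr->counter), "Ir" (old), "r" (new)
		: "cc");
	} while (res);			/* res != 0: strex failed, retry */

	return oldval;
}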
134 prefetchw(&v->counter); in arch_atomic_fetch_add_unless()
145 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic_fetch_add_unless()
146 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic_fetch_add_unless()
159 #error SMP not supported on pre-ARMv6 CPUs
168 v->counter c_op i; \
179 v->counter c_op i; \
180 val = v->counter; \
193 val = v->counter; \
194 v->counter c_op i; \
216 ret = v->counter; in arch_atomic_cmpxchg()
218 v->counter = new; in arch_atomic_cmpxchg()
233 ATOMIC_OPS(sub, -=, sub)
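ATOMIC_OPS() is just the bundling macro: each invocation stamps out the plain, _return and fetch_ variants of one operation from whichever ATOMIC_OP* definitions are in force (ldrex/strex on ARMv6+, IRQ-off otherwise). Roughly:

/* Sketch of the wrapper; ATOMIC_OPS(sub, -=, sub) then generates
 * arch_atomic_sub() plus sub_return and fetch_sub variants. The '-='
 * is used by the IRQ-off path, the 'sub' mnemonic by the inline asm. */
#define ATOMIC_OPS_SKETCH(op, c_op, asm_op)	\
	ATOMIC_OP(op, c_op, asm_op)		\
	ATOMIC_OP_RETURN(op, c_op, asm_op)	\
	ATOMIC_FETCH_OP(op, c_op, asm_op)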
267 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
277 : "=Qo" (v->counter) in arch_atomic64_set()
278 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
289 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
299 prefetchw(&v->counter); in arch_atomic64_set()
305 : "=&r" (tmp), "=Qo" (v->counter) in arch_atomic64_set()
306 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
317 prefetchw(&v->counter); \
325 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
326 : "r" (&v->counter), "r" (i) \
337 prefetchw(&v->counter); \
346 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
347 : "r" (&v->counter), "r" (i) \
360 prefetchw(&v->counter); \
369 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
370 : "r" (&v->counter), "r" (i) \
381 ATOMIC64_OPS(add, adds, adc) in ATOMIC64_OPS() argument
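The 64-bit operation macros take two mnemonics because the arithmetic is split across the low and high words: ATOMIC64_OPS(add, adds, adc) pairs adds (low words, sets carry) with adc (high words, consumes carry), and the sub case pairs subs/sbc the same way. Filled in around the operand fragments above, ATOMIC64_OP expands to roughly:

static inline void arch_atomic64_add_sketch(s64 i, atomic64_t *v)
{
	s64 result;
	unsigned long tmp;

	prefetchw(&v->counter);
	__asm__ __volatile__("@ atomic64_add\n"
"1:	ldrexd	%0, %H0, [%3]\n"	/* load both words exclusively  */
"	adds	%Q0, %Q0, %Q4\n"	/* low word, sets carry         */
"	adc	%R0, %R0, %R4\n"	/* high word, adds the carry in */
"	strexd	%1, %0, %H0, [%3]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
	: "cc");
}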
414 unsigned long res; in arch_atomic64_cmpxchg_relaxed() local
416 prefetchw(&ptr->counter); in arch_atomic64_cmpxchg_relaxed()
425 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in arch_atomic64_cmpxchg_relaxed()
426 : "r" (&ptr->counter), "r" (old), "r" (new) in arch_atomic64_cmpxchg_relaxed()
428 } while (res); in arch_atomic64_cmpxchg_relaxed()
439 prefetchw(&ptr->counter); in arch_atomic64_xchg_relaxed()
446 : "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter) in arch_atomic64_xchg_relaxed()
447 : "r" (&ptr->counter), "r" (new) in arch_atomic64_xchg_relaxed()
460 prefetchw(&v->counter); in arch_atomic64_dec_if_positive()
472 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_dec_if_positive()
473 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
488 prefetchw(&v->counter); in arch_atomic64_fetch_add_unless()
496 " adc %R1, %R0, %R6\n" in arch_atomic64_fetch_add_unless()
501 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_fetch_add_unless()
502 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic64_fetch_add_unless()