/linux/arch/loongarch/include/asm/atomic.h:

     37: #define ATOMIC_OP(op, I, asm_op) \
     41:         "am"#asm_op".w" " $zero, %1, %0 \n" \
     47: #define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
     53:         "am"#asm_op#mb".w" " %1, %2, %0 \n" \
     61: #define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix) \
     67:         "am"#asm_op#mb".w" " %1, %2, %0 \n" \
     75: #define ATOMIC_OPS(op, I, asm_op, c_op) \
     76:         ATOMIC_OP(op, I, asm_op) \
     77:         ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db, ) \
     78:         ATOMIC_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
     [all …]
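These ATOMIC_OP()/ATOMIC_OP_RETURN()/ATOMIC_FETCH_OP() templates stamp out one function per operation, and ATOMIC_OPS() instantiates the whole family; on LoongArch each op becomes a single "am"#asm_op".w" AMO instruction. A minimal, portable sketch of the same stamping pattern, written with GCC/Clang __atomic builtins instead of LoongArch inline asm (the my_* names are invented for illustration, not kernel API):

#include <stdio.h>

typedef struct { int counter; } my_atomic_t;

/* one void-returning op per instantiation, like ATOMIC_OP() */
#define MY_ATOMIC_OP(op, builtin)                                       \
static inline void my_atomic_##op(int i, my_atomic_t *v)                \
{                                                                       \
        builtin(&v->counter, i, __ATOMIC_RELAXED);                      \
}

/* value-returning flavour, like ATOMIC_FETCH_OP(): returns the old value */
#define MY_ATOMIC_FETCH_OP(op, builtin)                                 \
static inline int my_atomic_fetch_##op(int i, my_atomic_t *v)           \
{                                                                       \
        return builtin(&v->counter, i, __ATOMIC_SEQ_CST);               \
}

/* instantiate the whole family for one operation, like ATOMIC_OPS() */
#define MY_ATOMIC_OPS(op, builtin)                                      \
        MY_ATOMIC_OP(op, builtin)                                       \
        MY_ATOMIC_FETCH_OP(op, builtin)

MY_ATOMIC_OPS(add, __atomic_fetch_add)
MY_ATOMIC_OPS(and, __atomic_fetch_and)
MY_ATOMIC_OPS(or,  __atomic_fetch_or)

int main(void)
{
        my_atomic_t v = { 5 };

        my_atomic_add(3, &v);                   /* v.counter == 8 */
        int old = my_atomic_fetch_or(1, &v);    /* old == 8, v.counter == 9 */
        printf("%d %d\n", old, v.counter);
        return 0;
}

The kernel's version differs only in what goes inside the braces: a single inline-asm AMO instruction (and, for the _db variants, an ordering barrier) instead of a compiler builtin.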
/linux/arch/loongarch/include/asm/percpu.h:

     39: #define PERCPU_OP(op, asm_op, c_op) \
     48:         "am"#asm_op".w" " %[ret], %[val], %[ptr] \n" \
     54:         "am"#asm_op".d" " %[ret], %[val], %[ptr] \n" \
/linux/arch/arc/include/asm/atomic-llsc.h:

      8: #define ATOMIC_OP(op, asm_op) \
     15:         " " #asm_op " %[val], %[val], %[i] \n" \
     24: #define ATOMIC_OP_RETURN(op, asm_op) \
     31:         " " #asm_op " %[val], %[val], %[i] \n" \
     45: #define ATOMIC_FETCH_OP(op, asm_op) \
     52:         " " #asm_op " %[val], %[orig], %[i] \n" \
     72: #define ATOMIC_OPS(op, asm_op) \
     73:         ATOMIC_OP(op, asm_op) \
     74:         ATOMIC_OP_RETURN(op, asm_op) \
     75:         ATOMIC_FETCH_OP(op, asm_op)
     [all …]
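Here the asm_op sits inside an LLOCK/SCOND retry loop: load-linked, apply the operation, store-conditional, and loop if the store failed. The same shape, expressed with a portable C11 compare-exchange loop rather than ARC assembly (a sketch of the pattern, not the kernel's code; the function name is invented):

#include <stdatomic.h>

/* fetch-and-add in the LL/SC style: retry until the store "sticks" */
static int fetch_add_llsc_style(atomic_int *v, int i)
{
        int old = atomic_load_explicit(v, memory_order_relaxed);

        /*
         * compare_exchange_weak may fail spuriously, exactly like a
         * store-conditional, so it always sits in a loop; on failure
         * 'old' is refreshed with the current value automatically.
         */
        while (!atomic_compare_exchange_weak_explicit(v, &old, old + i,
                                                      memory_order_relaxed,
                                                      memory_order_relaxed))
                ;

        return old;     /* the FETCH variants return the pre-op value */
}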
/linux/arch/arc/include/asm/atomic-spinlock.h:

     31: #define ATOMIC_OP(op, c_op, asm_op) \
     41: #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
     59: #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
     76: #define ATOMIC_OPS(op, c_op, asm_op) \
     77:         ATOMIC_OP(op, c_op, asm_op) \
     78:         ATOMIC_OP_RETURN(op, c_op, asm_op) \
     79:         ATOMIC_FETCH_OP(op, c_op, asm_op)
     90: #define ATOMIC_OPS(op, c_op, asm_op) \
     91:         ATOMIC_OP(op, c_op, asm_op) \
     92:         ATOMIC_FETCH_OP(op, c_op, asm_op)
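In this variant no asm lines show up at all: for cores without LLOCK/SCOND the plain C operator (the c_op parameter) is applied while a lock is held, and asm_op is carried only so the same ATOMIC_OPS() invocations still expand. A rough user-space analogue of that lock-based fallback, using a pthread mutex in place of the kernel's lock (names invented):

#include <pthread.h>

typedef struct { int counter; } emul_atomic_t;

/* one global lock serialises every emulated atomic, as the fallback does */
static pthread_mutex_t emul_atomic_lock = PTHREAD_MUTEX_INITIALIZER;

#define EMUL_ATOMIC_OP_RETURN(op, c_op)                                 \
static int emul_atomic_##op##_return(int i, emul_atomic_t *v)           \
{                                                                       \
        int ret;                                                        \
        pthread_mutex_lock(&emul_atomic_lock);                          \
        ret = (v->counter c_op i);      /* e.g. v->counter += i */      \
        pthread_mutex_unlock(&emul_atomic_lock);                        \
        return ret;                                                     \
}

EMUL_ATOMIC_OP_RETURN(add, +=)
EMUL_ATOMIC_OP_RETURN(and, &=)

The interface matches the LL/SC build (emul_atomic_add_return(3, &v)), but every atomic now contends on one lock, which is the usual cost of lock-based emulation.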
/linux/arch/powerpc/include/asm/atomic.h:

     49: #define ATOMIC_OP(op, asm_op, suffix, sign, ...) \
     56:         #asm_op "%I2" suffix " %0,%0,%2\n" \
     64: #define ATOMIC_OP_RETURN_RELAXED(op, asm_op, suffix, sign, ...) \
     71:         #asm_op "%I2" suffix " %0,%0,%2\n" \
     81: #define ATOMIC_FETCH_OP_RELAXED(op, asm_op, suffix, sign, ...) \
     88:         #asm_op "%I3" suffix " %1,%0,%3\n" \
     98: #define ATOMIC_OPS(op, asm_op, suffix, sign, ...) \
     99:         ATOMIC_OP(op, asm_op, suffix, sign, ##__VA_ARGS__) \
    100:         ATOMIC_OP_RETURN_RELAXED(op, asm_op, suffix, sign, ##__VA_ARGS__) \
    101:         ATOMIC_FETCH_OP_RELAXED(op, asm_op, suffix, sign, ##__VA_ARGS__)
     [all …]
/linux/arch/m68k/include/asm/atomic.h:

     32: #define ATOMIC_OP(op, c_op, asm_op) \
     35:         __asm__ __volatile__(#asm_op "l %1,%0" : "+m" (*v) : ASM_DI (i)); \
     40: #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
     47:         " " #asm_op "l %3,%1\n" \
     55: #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
     62:         " " #asm_op "l %3,%1\n" \
     72: #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
     85: #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
    101: #define ATOMIC_OPS(op, c_op, asm_op) \
    102:         ATOMIC_OP(op, c_op, asm_op) \
     [all …]
/linux/arch/alpha/include/asm/atomic.h:

     41: #define ATOMIC_OP(op, asm_op) \
     47:         " " #asm_op " %0,%2,%0\n" \
     57: #define ATOMIC_OP_RETURN(op, asm_op) \
     63:         " " #asm_op " %0,%3,%2\n" \
     64:         " " #asm_op " %0,%3,%0\n" \
     76: #define ATOMIC_FETCH_OP(op, asm_op) \
     82:         " " #asm_op " %2,%3,%0\n" \
     94: #define ATOMIC64_OP(op, asm_op) \
    100:         " " #asm_op " %0,%2,%0\n" \
    110: #define ATOMIC64_OP_RETURN(op, asm_op) \
     [all …]
/linux/arch/arm64/include/asm/atomic_lse.h:

     13: #define ATOMIC_OP(op, asm_op) \
     19:         " " #asm_op " %w[i], %[v]\n" \
     36: #define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...) \
     44:         " " #asm_op #mb " %w[i], %w[old], %[v]" \
     53: #define ATOMIC_FETCH_OPS(op, asm_op) \
     54:         ATOMIC_FETCH_OP(_relaxed, , op, asm_op) \
     55:         ATOMIC_FETCH_OP(_acquire, a, op, asm_op, "memory") \
     56:         ATOMIC_FETCH_OP(_release, l, op, asm_op, "memory") \
     57:         ATOMIC_FETCH_OP( , al, op, asm_op, "memory")
    120: #define ATOMIC64_OP(op, asm_op) \
     [all …]
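ATOMIC_FETCH_OPS() shows the other axis of the template: the same operation is stamped out four times, once per ordering flavour, by splicing an LSE suffix ("", "a", "l", "al") onto the instruction and adjusting the clobber list. The analogous trick in portable C11 swaps a memory_order argument instead of an instruction suffix (an illustrative sketch, not the kernel's code):

#include <stdatomic.h>

/* one template, four ordering flavours, mirroring ATOMIC_FETCH_OPS() */
#define FETCH_ADD_VARIANT(name, order)                                  \
static inline int fetch_add##name(atomic_int *v, int i)                 \
{                                                                       \
        return atomic_fetch_add_explicit(v, i, order);                  \
}

FETCH_ADD_VARIANT(_relaxed, memory_order_relaxed)
FETCH_ADD_VARIANT(_acquire, memory_order_acquire)
FETCH_ADD_VARIANT(_release, memory_order_release)
FETCH_ADD_VARIANT(,         memory_order_seq_cst)   /* fully ordered */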
/linux/arch/arm64/include/asm/atomic_ll_sc.h:

     25: #define ATOMIC_OP(op, asm_op, constraint) \
     35:         " " #asm_op " %w0, %w0, %w3\n" \
     42: #define ATOMIC_OP_RETURN(name, mb, acq, rel, cl, op, asm_op, constraint) \
     52:         " " #asm_op " %w0, %w0, %w3\n" \
     63: #define ATOMIC_FETCH_OP(name, mb, acq, rel, cl, op, asm_op, constraint) \
     73:         " " #asm_op " %w1, %w0, %w4\n" \
    121: #define ATOMIC64_OP(op, asm_op, constraint) \
    131:         " " #asm_op " %0, %0, %3\n" \
    138: #define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op, constraint) \
    148:         " " #asm_op " %0, %0, %3\n" \
     [all …]
/linux/arch/mips/include/asm/atomic.h:

     44: #define ATOMIC_OP(pfx, op, type, c_op, asm_op, ll, sc) \
     63:         " " #asm_op " %0, %2 \n" \
     71: #define ATOMIC_OP_RETURN(pfx, op, type, c_op, asm_op, ll, sc) \
     93:         " " #asm_op " %0, %1, %3 \n" \
     96:         " " #asm_op " %0, %1, %3 \n" \
    105: #define ATOMIC_FETCH_OP(pfx, op, type, c_op, asm_op, ll, sc) \
    126:         " " #asm_op " %0, %1, %3 \n" \
    139: #define ATOMIC_OPS(pfx, op, type, c_op, asm_op, ll, sc) \
    140:         ATOMIC_OP(pfx, op, type, c_op, asm_op, ll, sc) \
    141:         ATOMIC_OP_RETURN(pfx, op, type, c_op, asm_op, ll, sc) \
     [all …]
/linux/arch/arm/include/asm/atomic.h:

     36: #define ATOMIC_OP(op, c_op, asm_op) \
     45:         " " #asm_op " %0, %0, %4\n" \
     54: #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
     64:         " " #asm_op " %0, %0, %4\n" \
     75: #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
     85:         " " #asm_op " %1, %0, %5\n" \
    162: #define ATOMIC_OP(op, c_op, asm_op) \
    172: #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
    186: #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
    227: #define ATOMIC_OPS(op, c_op, asm_op) \
     [all …]