/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that does not
	 * return a value and does not perform memory barriers, and
	 * two which return a value (the new and the old value,
	 * respectively) and do perform the barriers.  A C sketch of
	 * the shared cas loop follows the macro section below.
	 */

#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);							\
EXPORT_SYMBOL(atomic_##op);

#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);						\
EXPORT_SYMBOL(atomic_##op##_return);

#define ATOMIC_FETCH_OP(op)						\
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_fetch_##op);						\
EXPORT_SYMBOL(atomic_fetch_##op);

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);							\
EXPORT_SYMBOL(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);					\
EXPORT_SYMBOL(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_fetch_##op);						\
EXPORT_SYMBOL(atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
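
	/* Every routine generated above has the same shape: load the
	 * current value, compute the new value, and try to install it
	 * with cas/casx, spinning (with exponential backoff on SMP)
	 * whenever another CPU wins the race.  A rough C sketch of
	 * atomic_add_return, for illustration only -- the __cas32()
	 * helper is hypothetical, standing in for the cas instruction,
	 * and returns the value actually found in memory:
	 *
	 *	static int atomic_add_return_sketch(int i, int *p)
	 *	{
	 *		int old, new;
	 *
	 *		do {
	 *			old = *p;	// lduw [%o1], %g1
	 *			new = old + i;	// add  %g1, %o0, %g7
	 *		} while (__cas32(p, old, new) != old);	// cas; cmp; bne
	 *		return old + i;		// sra  %g1, 0, %o0
	 *	}
	 *
	 * The non-returning atomic_##op variants skip the final result
	 * computation, and the atomic_fetch_##op variants return old
	 * instead of old + i.
	 */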
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
EXPORT_SYMBOL(atomic64_dec_if_positive)
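
	/* For reference, atomic64_dec_if_positive above is roughly the
	 * following C, again with a hypothetical __cas64() standing in
	 * for the casx instruction: the counter is only written back
	 * when the old value was positive, but old - 1 is returned
	 * either way, so a negative result tells the caller that no
	 * decrement happened.
	 *
	 *	static long atomic64_dec_if_positive_sketch(long *p)
	 *	{
	 *		long old;
	 *
	 *		do {
	 *			old = *p;	// ldx [%o0], %g1
	 *			if (old <= 0)	// brlez,pn %g1, 3f
	 *				break;
	 *		} while (__cas64(p, old, old - 1) != old);
	 *		return old - 1;		// sub %g1, 1, %o0
	 *	}
	 */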