xref: /linux/arch/sparc/lib/atomic_64.S (revision 80d443e8876602be2c130f79c4de81e12e2a700d)
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two that return a value
	 * (the new and the old value, respectively) and
	 * do perform the barriers.
	 */

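	/* ATOMIC_OP(op): 32-bit read/op/update loop with no return value.
	 * The word is loaded with lduw, the updated value is computed
	 * into %g7, and cas stores %g7 only if the word in memory still
	 * equals %g1.  If %g1 and %g7 differ afterwards, another CPU
	 * changed the word in between, so we retry; BACKOFF_LABEL sends
	 * the retry through the exponential backoff spin at 2f on SMP
	 * builds (see asm/backoff.h) and straight back to 1b otherwise.
	 *
	 * Roughly, as a C sketch (not the kernel implementation):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old op val;
	 *	} while (cmpxchg(ptr, old, new) != old);
	 */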
#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);							\
EXPORT_SYMBOL(atomic_##op);

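	/* ATOMIC_OP_RETURN(op): same cas loop, but the branch delay slot
	 * recomputes the result into %g1 so that the new value can be
	 * returned; sra %g1, 0, %o0 sign-extends the 32-bit result into
	 * the 64-bit return register, since the C prototype returns int.
	 */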
#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);						\
EXPORT_SYMBOL(atomic_##op##_return);

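	/* ATOMIC_FETCH_OP(op): same cas loop, but what gets sign-extended
	 * into %o0 and returned is %g1, the old value that was loaded by
	 * lduw before the operation was applied.
	 */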
#define ATOMIC_FETCH_OP(op)						\
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_fetch_##op);						\
EXPORT_SYMBOL(atomic_fetch_##op);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
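	/* The bitwise ops get only the plain and fetch variants; the
	 * atomic API has no atomic_{and,or,xor}_return.
	 */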
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

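	/* The atomic64_t variants follow the same pattern, but operate on
	 * the full doubleword with ldx/casx and test %xcc instead of
	 * %icc.  No sign-extension is needed, so the _return form can
	 * compute the new value directly into %o0 in the retl delay slot
	 * and the fetch form can simply mov the old value into %o0.
	 */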
#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);							\
EXPORT_SYMBOL(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);					\
EXPORT_SYMBOL(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_fetch_##op);						\
EXPORT_SYMBOL(atomic64_fetch_##op);

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

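	/* atomic64_dec_if_positive: decrement the counter only if it is
	 * currently greater than zero, and return old - 1.  If the value
	 * was already <= 0, nothing is stored and the (negative)
	 * old - 1 is still returned for the caller to test.  Roughly, as
	 * a C sketch:
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old - 1;
	 *		if (old <= 0)
	 *			return new;
	 *	} while (cmpxchg64(ptr, old, new) != old);
	 *	return new;
	 */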
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
EXPORT_SYMBOL(atomic64_dec_if_positive)
