/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif
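
/*
 * Roughly what the weak-alias plumbing above arranges, sketched in C with
 * the GCC alias attribute rather than the actual ANSI_PRAGMA_WEAK2
 * expansion; illustrative only, not part of this file.
 *
 *	#include <stdint.h>
 *
 *	extern uint32_t atomic_cas_32(volatile uint32_t *, uint32_t,
 *	    uint32_t);
 *
 *	// The legacy name resolves to the atomic_* implementation unless
 *	// some other object defines it strongly.
 *	extern __typeof(atomic_cas_32) cas32
 *	    __attribute__((weak, alias("atomic_cas_32")));
 */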

	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	ba	add_8
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	ba	add_8
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char)
	ALTENTRY(atomic_add_char_nv)
add_8:
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8_nv)
	SET_SIZE(atomic_add_8)
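
/*
 * A hedged C model of add_8 above (GCC builtins; the name and layout are
 * illustrative, not the actual implementation): the byte is widened to its
 * containing word, the delta is shifted into the byte's lane, and the cas
 * retries until no other byte of the word changes underneath us.
 *
 *	#include <stdint.h>
 *
 *	static uint8_t
 *	add_8_model(volatile uint8_t *p, uint8_t delta)
 *	{
 *		uintptr_t a = (uintptr_t)p;
 *		volatile uint32_t *w =
 *		    (volatile uint32_t *)(a & ~(uintptr_t)0x3);
 *		uint32_t shift = ((a & 0x3) ^ 0x3) * 8;	// big-endian lane
 *		uint32_t mask = (uint32_t)0xff << shift;
 *		uint32_t add = ((uint32_t)delta << shift) & mask;
 *		uint32_t old = *w, nv;
 *
 *		do {
 *			// add within the lane; mask off the carry out of it
 *			nv = (old & ~mask) | ((old + add) & mask);
 *		} while (!__atomic_compare_exchange_n(w, &old, nv, 0,
 *		    __ATOMIC_RELAXED, __ATOMIC_RELAXED));
 *		return ((uint8_t)((nv & mask) >> shift));  // _nv semantics
 *	}
 */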

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	ba	add_16
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	ba	add_16
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
add_16:
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	add	%o1, %o2, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o1, %o2, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)
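
/*
 * Worked example of the two offsets used above, assuming SPARC's
 * big-endian byte order (illustrative C, <stdint.h> types): the
 * left-to-right bit offset positions the 0xffff0000 mask, while the
 * right-to-left bit offset positions the operand within the word.
 *
 *	static uint32_t
 *	lane_mask_16(uintptr_t addr, uint32_t *rtl_shift)
 *	{
 *		uint32_t ltr = (addr & 0x2) * 8;	// 0 or 16
 *		*rtl_shift = ((addr & 0x2) ^ 0x2) * 8;	// 16 or 0
 *		return (0xffff0000u >> ltr);	// 0xffff0000 or 0x0000ffff
 *	}
 */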

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	ba	add_32
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	ba	add_32
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
add_32:
	ld	[%o0], %o2
1:
	add	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)
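
/*
 * The word-sized flavors need no masking; the loop above is the plain cas
 * retry idiom, with the annulled delay slot refreshing the expected value
 * only when the branch is taken. A sketch with GCC builtins (hypothetical
 * name, not the actual implementation):
 *
 *	static uint32_t
 *	add_32_model(volatile uint32_t *p, int32_t delta)
 *	{
 *		uint32_t old = *p;	// plain read, like the initial ld
 *		// on failure the builtin refreshes old, as the delay
 *		// slot's mov does above, so we simply retry
 *		while (!__atomic_compare_exchange_n(p, &old, old + delta, 0,
 *		    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
 *			;
 *		return (old + delta);	// *_nv semantics: the new value
 *	}
 */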

	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	ba	add_64
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	ba	add_64
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
add_64:
	ldx	[%o0], %o2
1:
	add	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	add	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)
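
/*
 * Per the comments above, a 64-bit argument arrives here as a pair of
 * 32-bit registers (%o1:%o2) and the result leaves the same way (%o0:%o1).
 * The sllx/srl/add prologue and the srlx epilogue implement, in effect
 * (hi and lo are illustrative names for the two halves):
 *
 *	#include <stdint.h>
 *
 *	static uint64_t
 *	pack64(uint32_t hi, uint32_t lo)	// mirrors sllx/srl/add
 *	{
 *		return (((uint64_t)hi << 32) | lo);
 *	}
 *
 *	// and on return: hi half = (uint32_t)(result >> 32) in %o0,
 *	// low half = (uint32_t)result in %o1.
 */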

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5		! or in the new value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
	ld	[%o0], %o2
1:
	or	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	or	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	or	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in other bytes
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)
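
/*
 * For AND the operand trick differs from add and or: orn fills every bit
 * outside the target byte with ones, so the and passes the neighboring
 * bytes through unchanged and no separate merge step is needed. In C
 * terms (illustrative fragment; value, shift, mask, old as above):
 *
 *	static uint32_t
 *	and_8_operand(uint32_t value, uint32_t shift, uint32_t mask)
 *	{
 *		// orn %o1, %o3, %o1: ones everywhere outside the lane
 *		return ((value << shift) | ~mask);
 *	}
 *
 *	// then: nv = old & and_8_operand(value, shift, mask);
 */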

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in the other half
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
	ld	[%o0], %o2
1:
	and	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	and	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	and	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)
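
/*
 * The loop above distinguishes two failure modes: if the target byte no
 * longer matches the comparand, the operation is over and that byte is
 * returned; if only a neighboring byte changed, the cas is retried. A
 * loose C rendering (hypothetical names, GCC builtins):
 *
 *	#include <stdint.h>
 *
 *	static uint8_t
 *	cas_8_model(volatile uint8_t *p, uint8_t cmp, uint8_t nv)
 *	{
 *		uintptr_t a = (uintptr_t)p;
 *		volatile uint32_t *w =
 *		    (volatile uint32_t *)(a & ~(uintptr_t)0x3);
 *		uint32_t shift = ((a & 0x3) ^ 0x3) * 8;
 *		uint32_t mask = (uint32_t)0xff << shift;
 *		uint32_t c = ((uint32_t)cmp << shift) & mask;
 *		uint32_t n = ((uint32_t)nv << shift) & mask;
 *		uint32_t old = *w;
 *
 *		for (;;) {
 *			uint32_t expected = (old & ~mask) | c;
 *			uint32_t desired = (old & ~mask) | n;
 *			if (__atomic_compare_exchange_n(w, &expected,
 *			    desired, 0, __ATOMIC_RELAXED, __ATOMIC_RELAXED))
 *				return (cmp);	// byte matched, was swapped
 *			old = expected;		// current word contents
 *			if ((old & mask) != c)	// "should we have succeeded?"
 *				return ((uint8_t)((old & mask) >> shift));
 *			// only a neighboring byte changed; try again
 *		}
 *	}
 */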

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	cas	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	sllx	%o1, 32, %o1		! cmp's upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2		! convert 2 32-bit args into 1 64-bit
	add	%o1, %o2, %o1
	sllx	%o3, 32, %o2		! newval upper 32 in %o3, lower in %o4
	srl	%o4, 0, %o4		! setup %o2 to have newval
	add	%o2, %o4, %o2
	casx	[%o0], %o1, %o2
	srl	%o2, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o2, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_cas_64)

	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	ld	[%o0], %o2
1:
	mov	%o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	mov	%o3, %o0
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
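
/*
 * Swap is expressed as a cas loop that keeps proposing the same new value
 * until the word stops changing underneath it, and returns the previous
 * contents. Sketch (GCC builtins, hypothetical name):
 *
 *	static uint32_t
 *	swap_32_model(volatile uint32_t *p, uint32_t nv)
 *	{
 *		uint32_t old = *p;
 *		while (!__atomic_compare_exchange_n(p, &old, nv, 0,
 *		    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
 *			;
 *		return (old);	// swap returns the previous value
 *	}
 */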

	ENTRY(atomic_swap_64)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	mov	%o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	srl	%o3, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o3, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_swap_64)

	ENTRY(atomic_set_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andcc	%o2, %o3, %g0		! test if the bit is set
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	or	%o2, %o3, %o4		! set the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_set_long_excl)
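
/*
 * atomic_set_long_excl sets bit %o1 of *%o0 exclusively: it returns -1
 * without spinning if the bit is already set, and 0 once it commits
 * (atomic_clear_long_excl below is the mirror image). A hedged C model
 * (hypothetical name, GCC builtins):
 *
 *	static int
 *	set_long_excl_model(volatile unsigned long *target, unsigned int bit)
 *	{
 *		unsigned long m = 1UL << bit;
 *		unsigned long old = *target;
 *
 *		do {
 *			if (old & m)
 *				return (-1);	// already set: fail out
 *			// try to commit old | m; on failure old is refreshed
 *		} while (!__atomic_compare_exchange_n(target, &old, old | m,
 *		    0, __ATOMIC_RELAXED, __ATOMIC_RELAXED));
 *		return (0);
 *	}
 */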

	ENTRY(atomic_clear_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andncc	%o3, %o2, %g0		! test if the bit is clear
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * Spitfires and Blackbirds have a problem with membars in the
	 * delay slot (SF_ERRATA_51).  For safety's sake, we assume
	 * that the whole world needs the workaround.
	 */
	ENTRY(membar_enter)
	membar	#StoreLoad|#StoreStore
	retl
	nop
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	membar	#LoadStore|#StoreStore
	retl
	nop
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	membar	#StoreStore
	retl
	nop
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	membar	#LoadLoad
	retl
	nop
	SET_SIZE(membar_consumer)
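
/*
 * The usual pairing for the producer/consumer barriers, sketched in C
 * (declarations live in <atomic.h> on Solaris-derived systems; ready and
 * data are illustrative, and real code would declare them volatile):
 *
 *	// producer side: publish data, then the flag
 *	data = compute();
 *	membar_producer();	// order the data store before the flag store
 *	ready = 1;
 *
 *	// consumer side: observe the flag, then read data
 *	while (!ready)
 *		;
 *	membar_consumer();	// order the flag load before the data loads
 *	use(data);
 */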

#endif	/* !_KERNEL */