/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif

	/*
	 * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_8_nv.
	 */
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	ba	add_8
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)

	/*
	 * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_8_nv.
	 */
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	ba	add_8
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)

	/*
	 * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_8_nv.
	 */
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char)
	ALTENTRY(atomic_add_char_nv)
add_8:
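	/*
	 * There is no byte-wide cas, so 8-bit arithmetic is emulated
	 * with a 32-bit cas on the word containing the byte.  As an
	 * illustrative sketch (hypothetical C, not part of this file's
	 * interface):
	 *
	 *	do {
	 *		old = *wordaddr;
	 *		new = (old & ~mask) | ((old + delta) & mask);
	 *	} while (cas(wordaddr, old, new) != old);
	 *
	 * The mask selects the target byte; all other bytes in the
	 * word are written back unchanged.
	 */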
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8_nv)
	SET_SIZE(atomic_add_8)

	/*
	 * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_16_nv.
	 */
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	ba	add_16
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)

	/*
	 * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_16_nv.
	 */
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	ba	add_16
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)

	/*
	 * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_16_nv.
	 */
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
add_16:
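	/*
	 * Halfword variant of the same word-wide cas emulation.  Two
	 * bit offsets are computed here: the mask is shifted right
	 * from the top of the word (left-to-right offset in %o4),
	 * while the addend is shifted left from bit 0 (right-to-left
	 * offset in %g1).
	 */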
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	add	%o1, %o2, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o1, %o2, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)

	/*
	 * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_32_nv.
	 */
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	ba	add_32
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)

	/*
	 * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_32_nv.
	 */
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	ba	add_32
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)

	/*
	 * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_32_nv.
	 */
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
add_32:
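	/*
	 * Classic load/cas retry loop.  The annulled branch's delay
	 * slot copies the value cas observed into %o2, so a retry
	 * starts from the freshest copy without reloading from memory.
	 */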
	ld	[%o0], %o2
1:
	add	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)

	/*
	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_64_nv.
	 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	ba	add_64
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	/*
	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_64_nv.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	ba	add_64
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	/*
	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_64_nv.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
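	/*
	 * In this 32-bit sparc build a 64-bit argument arrives as a
	 * register pair (presumably per the v8plus ABI), so the next
	 * three instructions reassemble %o1 (upper half) and %o2
	 * (lower half) into a single 64-bit value in %o1 before the
	 * casx loop; the return value is split back the same way.
	 */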
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
add_64:
	ldx	[%o0], %o2
1:
	add	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	add	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

	/*
	 * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_8_nv.
	 */
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
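	/*
	 * Same byte-in-word technique as add_8, but simpler: the
	 * operand is masked down to the target byte, and or-ing zero
	 * bits into the neighboring bytes leaves them untouched, so
	 * no clear-and-insert step is needed inside the loop.
	 */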
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)

	/*
	 * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_16_nv.
	 */
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5		! or in the new value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)

	/*
	 * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_32_nv.
	 */
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
	ld	[%o0], %o2
1:
	or	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)

	/*
	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_64_nv.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	or	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	or	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	/*
	 * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_8_nv.
	 */
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
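	/*
	 * Dual of the or_8 case: for and, the bits outside the target
	 * byte must be all ones so the neighboring bytes pass through
	 * unchanged, hence the orn below rather than a plain mask.
	 */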
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in other bytes
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)

	/*
	 * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_16_nv.
	 */
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in the other half
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)

	/*
	 * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_32_nv.
	 */
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
	ld	[%o0], %o2
1:
	and	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)

	/*
	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_64_nv.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	and	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	and	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
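	/*
	 * The word-wide cas can fail for two reasons: the target byte
	 * did not match (a legitimate failure, so return the observed
	 * value), or only a neighboring byte in the same word changed
	 * (a false failure, so retry).  The "should we have
	 * succeeded?" test below distinguishes the two cases.
	 */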
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	cas	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	sllx	%o1, 32, %o1		! cmp's upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2		! convert 2 32-bit args into 1 64-bit
	add	%o1, %o2, %o1
	sllx	%o3, 32, %o2		! newval upper 32 in %o3, lower in %o4
	srl	%o4, 0, %o4		! setup %o2 to have newval
	add	%o2, %o4, %o2
	casx	[%o0], %o1, %o2
	srl	%o2, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o2, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_cas_64)

	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
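	/*
	 * Swap is emulated with a cas retry loop rather than the v8
	 * swap instruction, which (to our understanding) is deprecated
	 * in the SPARC V9 architecture.
	 */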
	ld	[%o0], %o2
1:
	mov	%o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	mov	%o3, %o0
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	mov	%o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	srl	%o3, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o3, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_swap_64)

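	/*
	 * atomic_set_long_excl(target, bit): atomically set bit number
	 * %o1 in the long at [%o0].  Returns 0 on success, or -1 if
	 * the bit was already set.  Roughly (a hypothetical C sketch):
	 *
	 *	if (*target & (1L << bit))
	 *		return (-1);
	 *	*target |= (1L << bit);
	 *	return (0);
	 *
	 * performed atomically via the casn loop below.
	 */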
	ENTRY(atomic_set_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andcc	%o2, %o3, %g0		! test if the bit is set
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	or	%o2, %o3, %o4		! set the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_set_long_excl)

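	/*
	 * atomic_clear_long_excl(target, bit): the inverse of
	 * atomic_set_long_excl.  Returns 0 on success, or -1 if the
	 * bit was already clear.
	 */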
	ENTRY(atomic_clear_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andncc	%o3, %o2, %g0		! test if the bit is clear
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * Spitfires and Blackbirds have a problem with membars in the
	 * delay slot (SF_ERRATA_51).  For safety's sake, we assume
	 * that the whole world needs the workaround.
	 */
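	/*
	 * Because of that erratum, each membar below is followed by a
	 * retl/nop pair instead of being placed in the delay slot,
	 * where it would otherwise naturally go.
	 */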
	ENTRY(membar_enter)
	membar	#StoreLoad|#StoreStore
	retl
	nop
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	membar	#LoadStore|#StoreStore
	retl
	nop
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	membar	#StoreStore
	retl
	nop
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	membar	#LoadLoad
	retl
	nop
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */
