xref: /titanic_44/usr/src/common/atomic/sparc/atomic.s (revision 1e4c938b57d1656808e4112127ff1dce3eba5314)
1/*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22/*
23 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
24 * Use is subject to license terms.
25 */
26
27	.file	"atomic.s"
28
29#include <sys/asm_linkage.h>
30
31#if defined(_KERNEL)
32	/*
33	 * Legacy kernel interfaces; they will go away (eventually).
34	 */
	/*
	 * ANSI_PRAGMA_WEAK2(legacy, current, type) presumably emits a weak
	 * alias so each old name (cas8, swapl, ...) resolves to the modern
	 * atomic_* implementation below — see sys/asm_linkage.h to confirm
	 * the macro's exact expansion.
	 */
35	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
36	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
37	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
38	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
39	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
40	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
41	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
42	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
43#endif
44
45	/*
46	 * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
47	 * separated, you need to also edit the libc sparc platform
48	 * specific mapfile and remove the NODYNSORT attribute
49	 * from atomic_inc_8_nv.
50	 */
	/*
	 * Atomically increment the byte at [%o0] by 1.
	 * Tail-branches into the shared add_8 path, loading the +1 delta
	 * into %o1 from the branch delay slot; add_8 returns the new
	 * byte value in %o0.
	 */
51	ENTRY(atomic_inc_8)
52	ALTENTRY(atomic_inc_8_nv)
53	ALTENTRY(atomic_inc_uchar)
54	ALTENTRY(atomic_inc_uchar_nv)
55	ba	add_8
56	  add	%g0, 1, %o1	! delay slot: %o1 = delta = +1
57	SET_SIZE(atomic_inc_uchar_nv)
58	SET_SIZE(atomic_inc_uchar)
59	SET_SIZE(atomic_inc_8_nv)
60	SET_SIZE(atomic_inc_8)
61
62	/*
63	 * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
64	 * separated, you need to also edit the libc sparc platform
65	 * specific mapfile and remove the NODYNSORT attribute
66	 * from atomic_dec_8_nv.
67	 */
	/*
	 * Atomically decrement the byte at [%o0] by 1.
	 * Tail-branches into the shared add_8 path, loading the -1 delta
	 * into %o1 from the branch delay slot; add_8 returns the new
	 * byte value in %o0.
	 */
68	ENTRY(atomic_dec_8)
69	ALTENTRY(atomic_dec_8_nv)
70	ALTENTRY(atomic_dec_uchar)
71	ALTENTRY(atomic_dec_uchar_nv)
72	ba	add_8
73	  sub	%g0, 1, %o1	! delay slot: %o1 = delta = -1
74	SET_SIZE(atomic_dec_uchar_nv)
75	SET_SIZE(atomic_dec_uchar)
76	SET_SIZE(atomic_dec_8_nv)
77	SET_SIZE(atomic_dec_8)
78
79	/*
80	 * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
81	 * separated, you need to also edit the libc sparc platform
82	 * specific mapfile and remove the NODYNSORT attribute
83	 * from atomic_add_8_nv.
84	 */
	/*
	 * Atomically add the low 8 bits of %o1 to the byte at [%o0];
	 * return the new byte value in %o0.
	 *
	 * There is no byte-wide cas here: the update is emulated with a
	 * 32-bit cas on the aligned word containing the byte.  %o3 holds
	 * a mask selecting the target byte within that word and %g1 the
	 * bit shift to its position (byte offsets computed left-to-right
	 * per the existing comments, i.e. big-endian byte order).
	 */
85	ENTRY(atomic_add_8)
86	ALTENTRY(atomic_add_8_nv)
87	ALTENTRY(atomic_add_char)
88	ALTENTRY(atomic_add_char_nv)
89add_8:
90	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
91	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
92	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
93	set	0xff, %o3		! %o3 = mask
94	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
95	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
96	and	%o1, %o3, %o1		! %o1 = single byte value
97	andn	%o0, 0x3, %o0		! %o0 = word address
98	ld	[%o0], %o2		! read old value
991:
100	add	%o2, %o1, %o5		! add value to the old value
101	and	%o5, %o3, %o5		! clear other bits
102	andn	%o2, %o3, %o4		! clear target bits
103	or	%o4, %o5, %o5		! insert the new value
104	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
105	cmp	%o2, %o5		! did memory still hold our expected word?
106	bne,a,pn %icc, 1b		! no: retry; annulled delay slot runs only if taken
107	  mov	%o5, %o2		! %o2 = old value
108	add	%o2, %o1, %o5		! recompute new byte for the return value
109	and	%o5, %o3, %o5
110	retl
111	srl	%o5, %g1, %o0		! %o0 = new value
112	SET_SIZE(atomic_add_char_nv)
113	SET_SIZE(atomic_add_char)
114	SET_SIZE(atomic_add_8_nv)
115	SET_SIZE(atomic_add_8)
116
117	/*
118	 * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
119	 * separated, you need to also edit the libc sparc platform
120	 * specific mapfile and remove the NODYNSORT attribute
121	 * from atomic_inc_16_nv.
122	 */
	/*
	 * Atomically increment the 16-bit value at [%o0] by 1.
	 * Tail-branches into the shared add_16 path, loading the +1 delta
	 * into %o1 from the branch delay slot; add_16 returns the new
	 * value in %o0.
	 */
123	ENTRY(atomic_inc_16)
124	ALTENTRY(atomic_inc_16_nv)
125	ALTENTRY(atomic_inc_ushort)
126	ALTENTRY(atomic_inc_ushort_nv)
127	ba	add_16
128	  add	%g0, 1, %o1	! delay slot: %o1 = delta = +1
129	SET_SIZE(atomic_inc_ushort_nv)
130	SET_SIZE(atomic_inc_ushort)
131	SET_SIZE(atomic_inc_16_nv)
132	SET_SIZE(atomic_inc_16)
133
134	/*
135	 * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
136	 * separated, you need to also edit the libc sparc platform
137	 * specific mapfile and remove the NODYNSORT attribute
138	 * from atomic_dec_16_nv.
139	 */
	/*
	 * Atomically decrement the 16-bit value at [%o0] by 1.
	 * Tail-branches into the shared add_16 path, loading the -1 delta
	 * into %o1 from the branch delay slot; add_16 returns the new
	 * value in %o0.
	 */
140	ENTRY(atomic_dec_16)
141	ALTENTRY(atomic_dec_16_nv)
142	ALTENTRY(atomic_dec_ushort)
143	ALTENTRY(atomic_dec_ushort_nv)
144	ba	add_16
145	  sub	%g0, 1, %o1	! delay slot: %o1 = delta = -1
146	SET_SIZE(atomic_dec_ushort_nv)
147	SET_SIZE(atomic_dec_ushort)
148	SET_SIZE(atomic_dec_16_nv)
149	SET_SIZE(atomic_dec_16)
150
151	/*
152	 * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
153	 * separated, you need to also edit the libc sparc platform
154	 * specific mapfile and remove the NODYNSORT attribute
155	 * from atomic_add_16_nv.
156	 */
	/*
	 * Atomically add the low 16 bits of %o1 to the halfword at [%o0];
	 * return the new 16-bit value in %o0.
	 *
	 * Like add_8, this emulates a halfword update with a 32-bit cas
	 * on the enclosing aligned word: %o3 masks the target halfword,
	 * %g1 is its bit position.  A misaligned (odd) address is allowed
	 * to fault at the ld, per the comment below.
	 */
157	ENTRY(atomic_add_16)
158	ALTENTRY(atomic_add_16_nv)
159	ALTENTRY(atomic_add_short)
160	ALTENTRY(atomic_add_short_nv)
161add_16:
162	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
163	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
164	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
165	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
166	sethi	%hi(0xffff0000), %o3	! %o3 = mask
167	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
168	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
169	and	%o1, %o3, %o1		! %o1 = single short value
170	andn	%o0, 0x2, %o0		! %o0 = word address
171	! if low-order bit is 1, we will properly get an alignment fault here
172	ld	[%o0], %o2		! read old value
1731:
174	add	%o1, %o2, %o5		! add value to the old value
175	and	%o5, %o3, %o5		! clear other bits
176	andn	%o2, %o3, %o4		! clear target bits
177	or	%o4, %o5, %o5		! insert the new value
178	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
179	cmp	%o2, %o5		! did memory still hold our expected word?
180	bne,a,pn %icc, 1b		! no: retry; annulled delay slot runs only if taken
181	  mov	%o5, %o2		! %o2 = old value
182	add	%o1, %o2, %o5		! recompute new halfword for the return value
183	and	%o5, %o3, %o5
184	retl
185	srl	%o5, %g1, %o0		! %o0 = new value
186	SET_SIZE(atomic_add_short_nv)
187	SET_SIZE(atomic_add_short)
188	SET_SIZE(atomic_add_16_nv)
189	SET_SIZE(atomic_add_16)
190
191	/*
192	 * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
193	 * separated, you need to also edit the libc sparc platform
194	 * specific mapfile and remove the NODYNSORT attribute
195	 * from atomic_inc_32_nv.
196	 */
	/*
	 * Atomically increment the 32-bit value at [%o0] by 1.
	 * Tail-branches into the shared add_32 path with the +1 delta in
	 * %o1; returns the new value in %o0.  The ulong aliases share this
	 * 32-bit path (long is 32 bits in this build — 64-bit quantities
	 * arrive as register pairs, see add_64 below).
	 */
197	ENTRY(atomic_inc_32)
198	ALTENTRY(atomic_inc_32_nv)
199	ALTENTRY(atomic_inc_uint)
200	ALTENTRY(atomic_inc_uint_nv)
201	ALTENTRY(atomic_inc_ulong)
202	ALTENTRY(atomic_inc_ulong_nv)
203	ba	add_32
204	  add	%g0, 1, %o1	! delay slot: %o1 = delta = +1
205	SET_SIZE(atomic_inc_ulong_nv)
206	SET_SIZE(atomic_inc_ulong)
207	SET_SIZE(atomic_inc_uint_nv)
208	SET_SIZE(atomic_inc_uint)
209	SET_SIZE(atomic_inc_32_nv)
210	SET_SIZE(atomic_inc_32)
211
212	/*
213	 * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
214	 * separated, you need to also edit the libc sparc platform
215	 * specific mapfile and remove the NODYNSORT attribute
216	 * from atomic_dec_32_nv.
217	 */
	/*
	 * Atomically decrement the 32-bit value at [%o0] by 1.
	 * Tail-branches into the shared add_32 path with the -1 delta in
	 * %o1; returns the new value in %o0.
	 */
218	ENTRY(atomic_dec_32)
219	ALTENTRY(atomic_dec_32_nv)
220	ALTENTRY(atomic_dec_uint)
221	ALTENTRY(atomic_dec_uint_nv)
222	ALTENTRY(atomic_dec_ulong)
223	ALTENTRY(atomic_dec_ulong_nv)
224	ba	add_32
225	  sub	%g0, 1, %o1	! delay slot: %o1 = delta = -1
226	SET_SIZE(atomic_dec_ulong_nv)
227	SET_SIZE(atomic_dec_ulong)
228	SET_SIZE(atomic_dec_uint_nv)
229	SET_SIZE(atomic_dec_uint)
230	SET_SIZE(atomic_dec_32_nv)
231	SET_SIZE(atomic_dec_32)
232
233	/*
234	 * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
235	 * separated, you need to also edit the libc sparc platform
236	 * specific mapfile and remove the NODYNSORT attribute
237	 * from atomic_add_32_nv.
238	 */
	/*
	 * Atomically add %o1 to the 32-bit value at [%o0]; return the new
	 * value in %o0.  Classic load/compute/cas retry loop: reload the
	 * observed value and retry until the cas commits.  The ptr/long
	 * aliases share this path (pointers and longs are 32 bits in this
	 * build).
	 */
239	ENTRY(atomic_add_32)
240	ALTENTRY(atomic_add_32_nv)
241	ALTENTRY(atomic_add_int)
242	ALTENTRY(atomic_add_int_nv)
243	ALTENTRY(atomic_add_ptr)
244	ALTENTRY(atomic_add_ptr_nv)
245	ALTENTRY(atomic_add_long)
246	ALTENTRY(atomic_add_long_nv)
247add_32:
248	ld	[%o0], %o2		! %o2 = expected old value
2491:
250	add	%o2, %o1, %o3		! %o3 = proposed new value
251	cas	[%o0], %o2, %o3		! commit if memory still holds %o2
252	cmp	%o2, %o3
253	bne,a,pn %icc, 1b		! lost a race: retry with observed value
254	  mov	%o3, %o2
255	retl
256	add	%o2, %o1, %o0		! return new value
257	SET_SIZE(atomic_add_long_nv)
258	SET_SIZE(atomic_add_long)
259	SET_SIZE(atomic_add_ptr_nv)
260	SET_SIZE(atomic_add_ptr)
261	SET_SIZE(atomic_add_int_nv)
262	SET_SIZE(atomic_add_int)
263	SET_SIZE(atomic_add_32_nv)
264	SET_SIZE(atomic_add_32)
265
266	/*
267	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
268	 * separated, you need to also edit the libc sparc platform
269	 * specific mapfile and remove the NODYNSORT attribute
270	 * from atomic_inc_64_nv.
271	 */
	/*
	 * Atomically increment the 64-bit value at [%o0] by 1.
	 * Branches past add_64's argument-pair merge (the +1 delta already
	 * fits in %o1); new value is returned split across %o0/%o1.
	 */
272	ENTRY(atomic_inc_64)
273	ALTENTRY(atomic_inc_64_nv)
274	ba	add_64
275	  add	%g0, 1, %o1	! delay slot: %o1 = 64-bit delta = +1
276	SET_SIZE(atomic_inc_64_nv)
277	SET_SIZE(atomic_inc_64)
278
279	/*
280	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
281	 * separated, you need to also edit the libc sparc platform
282	 * specific mapfile and remove the NODYNSORT attribute
283	 * from atomic_dec_64_nv.
284	 */
	/*
	 * Atomically decrement the 64-bit value at [%o0] by 1.
	 * Branches past add_64's argument-pair merge; `sub %g0, 1` yields
	 * the 64-bit value -1 in %o1.  New value returned in %o0/%o1.
	 */
285	ENTRY(atomic_dec_64)
286	ALTENTRY(atomic_dec_64_nv)
287	ba	add_64
288	  sub	%g0, 1, %o1	! delay slot: %o1 = 64-bit delta = -1
289	SET_SIZE(atomic_dec_64_nv)
290	SET_SIZE(atomic_dec_64)
291
292	/*
293	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
294	 * separated, you need to also edit the libc sparc platform
295	 * specific mapfile and remove the NODYNSORT attribute
296	 * from atomic_add_64_nv.
297	 */
	/*
	 * Atomically add a 64-bit delta to the value at [%o0]; return the
	 * new value.  Caller passes the delta as two 32-bit halves
	 * (%o1 = upper, %o2 = lower) which are merged into a single
	 * 64-bit %o1 before the casx retry loop; the result is likewise
	 * returned split across %o0 (upper) and %o1 (lower).
	 */
298	ENTRY(atomic_add_64)
299	ALTENTRY(atomic_add_64_nv)
300	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
301	srl	%o2, 0, %o2
302	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
303add_64:
304	ldx	[%o0], %o2		! %o2 = expected old value
3051:
306	add	%o2, %o1, %o3		! %o3 = proposed new value
307	casx	[%o0], %o2, %o3		! commit if memory still holds %o2
308	cmp	%o2, %o3
309	bne,a,pn %xcc, 1b		! lost a race: retry with observed value
310	  mov	%o3, %o2
311	add	%o2, %o1, %o1		! return lower 32-bits in %o1
312	retl
313	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
314	SET_SIZE(atomic_add_64_nv)
315	SET_SIZE(atomic_add_64)
316
317	/*
318	 * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
319	 * separated, you need to also edit the libc sparc platform
320	 * specific mapfile and remove the NODYNSORT attribute
321	 * from atomic_or_8_nv.
322	 */
	/*
	 * Atomically OR the low 8 bits of %o1 into the byte at [%o0];
	 * return the new byte value in %o0.  Emulated with a 32-bit cas
	 * on the enclosing word; since OR never disturbs bits outside the
	 * shifted byte value, no mask/merge of the other bytes is needed
	 * inside the loop (unlike add_8).
	 */
323	ENTRY(atomic_or_8)
324	ALTENTRY(atomic_or_8_nv)
325	ALTENTRY(atomic_or_uchar)
326	ALTENTRY(atomic_or_uchar_nv)
327	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
328	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
329	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
330	set	0xff, %o3		! %o3 = mask
331	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
332	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
333	and	%o1, %o3, %o1		! %o1 = single byte value
334	andn	%o0, 0x3, %o0		! %o0 = word address
335	ld	[%o0], %o2		! read old value
3361:
337	or	%o2, %o1, %o5		! or in the new value
338	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
339	cmp	%o2, %o5
340	bne,a,pn %icc, 1b		! lost a race: retry with observed value
341	  mov	%o5, %o2		! %o2 = old value
342	or	%o2, %o1, %o5		! recompute new byte for the return value
343	and	%o5, %o3, %o5
344	retl
345	srl	%o5, %g1, %o0		! %o0 = new value
346	SET_SIZE(atomic_or_uchar_nv)
347	SET_SIZE(atomic_or_uchar)
348	SET_SIZE(atomic_or_8_nv)
349	SET_SIZE(atomic_or_8)
350
351	/*
352	 * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
353	 * separated, you need to also edit the libc sparc platform
354	 * specific mapfile and remove the NODYNSORT attribute
355	 * from atomic_or_16_nv.
356	 */
	/*
	 * Atomically OR the low 16 bits of %o1 into the halfword at [%o0];
	 * return the new 16-bit value in %o0.  Emulated with a 32-bit cas
	 * on the enclosing word; OR cannot disturb the other halfword, so
	 * no merge step is needed inside the loop.
	 */
357	ENTRY(atomic_or_16)
358	ALTENTRY(atomic_or_16_nv)
359	ALTENTRY(atomic_or_ushort)
360	ALTENTRY(atomic_or_ushort_nv)
361	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
362	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
363	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
364	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
365	sethi	%hi(0xffff0000), %o3	! %o3 = mask
366	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
367	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
368	and	%o1, %o3, %o1		! %o1 = single short value
369	andn	%o0, 0x2, %o0		! %o0 = word address
370	! if low-order bit is 1, we will properly get an alignment fault here
371	ld	[%o0], %o2		! read old value
3721:
373	or	%o2, %o1, %o5		! or in the new value
374	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
375	cmp	%o2, %o5
376	bne,a,pn %icc, 1b		! lost a race: retry with observed value
377	  mov	%o5, %o2		! %o2 = old value
378	or	%o2, %o1, %o5		! or in the new value
379	and	%o5, %o3, %o5
380	retl
381	srl	%o5, %g1, %o0		! %o0 = new value
382	SET_SIZE(atomic_or_ushort_nv)
383	SET_SIZE(atomic_or_ushort)
384	SET_SIZE(atomic_or_16_nv)
385	SET_SIZE(atomic_or_16)
386
387	/*
388	 * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
389	 * separated, you need to also edit the libc sparc platform
390	 * specific mapfile and remove the NODYNSORT attribute
391	 * from atomic_or_32_nv.
392	 */
	/*
	 * Atomically OR %o1 into the 32-bit value at [%o0]; return the
	 * new value in %o0.  Standard load/compute/cas retry loop.
	 */
393	ENTRY(atomic_or_32)
394	ALTENTRY(atomic_or_32_nv)
395	ALTENTRY(atomic_or_uint)
396	ALTENTRY(atomic_or_uint_nv)
397	ALTENTRY(atomic_or_ulong)
398	ALTENTRY(atomic_or_ulong_nv)
399	ld	[%o0], %o2		! %o2 = expected old value
4001:
401	or	%o2, %o1, %o3		! %o3 = proposed new value
402	cas	[%o0], %o2, %o3		! commit if memory still holds %o2
403	cmp	%o2, %o3
404	bne,a,pn %icc, 1b		! lost a race: retry with observed value
405	  mov	%o3, %o2
406	retl
407	or	%o2, %o1, %o0		! return new value
408	SET_SIZE(atomic_or_ulong_nv)
409	SET_SIZE(atomic_or_ulong)
410	SET_SIZE(atomic_or_uint_nv)
411	SET_SIZE(atomic_or_uint)
412	SET_SIZE(atomic_or_32_nv)
413	SET_SIZE(atomic_or_32)
414
415	/*
416	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
417	 * separated, you need to also edit the libc sparc platform
418	 * specific mapfile and remove the NODYNSORT attribute
419	 * from atomic_or_64_nv.
420	 */
	/*
	 * Atomically OR a 64-bit value into [%o0]; return the new value.
	 * The operand arrives as two 32-bit halves (%o1 upper, %o2 lower),
	 * merged into %o1 before the casx retry loop; result is returned
	 * split across %o0 (upper) / %o1 (lower).
	 */
421	ENTRY(atomic_or_64)
422	ALTENTRY(atomic_or_64_nv)
423	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
424	srl	%o2, 0, %o2
425	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
426	ldx	[%o0], %o2		! %o2 = expected old value
4271:
428	or	%o2, %o1, %o3		! %o3 = proposed new value
429	casx	[%o0], %o2, %o3		! commit if memory still holds %o2
430	cmp	%o2, %o3
431	bne,a,pn %xcc, 1b		! lost a race: retry with observed value
432	  mov	%o3, %o2
433	or	%o2, %o1, %o1		! return lower 32-bits in %o1
434	retl
435	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
436	SET_SIZE(atomic_or_64_nv)
437	SET_SIZE(atomic_or_64)
438
439	/*
440	 * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
441	 * separated, you need to also edit the libc sparc platform
442	 * specific mapfile and remove the NODYNSORT attribute
443	 * from atomic_and_8_nv.
444	 */
	/*
	 * Atomically AND the low 8 bits of %o1 into the byte at [%o0];
	 * return the new byte value in %o0.  Emulated with a 32-bit cas on
	 * the enclosing word: the operand byte is widened with all-ones in
	 * the other byte lanes (orn) so the AND leaves them untouched.
	 */
445	ENTRY(atomic_and_8)
446	ALTENTRY(atomic_and_8_nv)
447	ALTENTRY(atomic_and_uchar)
448	ALTENTRY(atomic_and_uchar_nv)
449	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
450	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
451	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
452	set	0xff, %o3		! %o3 = mask
453	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
454	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
455	orn	%o1, %o3, %o1		! all ones in other bytes
456	andn	%o0, 0x3, %o0		! %o0 = word address
457	ld	[%o0], %o2		! read old value
4581:
459	and	%o2, %o1, %o5		! and in the new value
460	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
461	cmp	%o2, %o5
462	bne,a,pn %icc, 1b		! lost a race: retry with observed value
463	  mov	%o5, %o2		! %o2 = old value
464	and	%o2, %o1, %o5		! recompute new byte for the return value
465	and	%o5, %o3, %o5
466	retl
467	srl	%o5, %g1, %o0		! %o0 = new value
468	SET_SIZE(atomic_and_uchar_nv)
469	SET_SIZE(atomic_and_uchar)
470	SET_SIZE(atomic_and_8_nv)
471	SET_SIZE(atomic_and_8)
472
473	/*
474	 * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
475	 * separated, you need to also edit the libc sparc platform
476	 * specific mapfile and remove the NODYNSORT attribute
477	 * from atomic_and_16_nv.
478	 */
	/*
	 * Atomically AND the low 16 bits of %o1 into the halfword at
	 * [%o0]; return the new 16-bit value in %o0.  Emulated with a
	 * 32-bit cas on the enclosing word; the operand is widened with
	 * all-ones in the other halfword (orn) so the AND preserves it.
	 */
479	ENTRY(atomic_and_16)
480	ALTENTRY(atomic_and_16_nv)
481	ALTENTRY(atomic_and_ushort)
482	ALTENTRY(atomic_and_ushort_nv)
483	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
484	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
485	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
486	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
487	sethi	%hi(0xffff0000), %o3	! %o3 = mask
488	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
489	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
490	orn	%o1, %o3, %o1		! all ones in the other half
491	andn	%o0, 0x2, %o0		! %o0 = word address
492	! if low-order bit is 1, we will properly get an alignment fault here
493	ld	[%o0], %o2		! read old value
4941:
495	and	%o2, %o1, %o5		! and in the new value
496	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
497	cmp	%o2, %o5
498	bne,a,pn %icc, 1b		! lost a race: retry with observed value
499	  mov	%o5, %o2		! %o2 = old value
500	and	%o2, %o1, %o5		! recompute new halfword for the return value
501	and	%o5, %o3, %o5
502	retl
503	srl	%o5, %g1, %o0		! %o0 = new value
504	SET_SIZE(atomic_and_ushort_nv)
505	SET_SIZE(atomic_and_ushort)
506	SET_SIZE(atomic_and_16_nv)
507	SET_SIZE(atomic_and_16)
508
509	/*
510	 * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
511	 * separated, you need to also edit the libc sparc platform
512	 * specific mapfile and remove the NODYNSORT attribute
513	 * from atomic_and_32_nv.
514	 */
	/*
	 * Atomically AND %o1 into the 32-bit value at [%o0]; return the
	 * new value in %o0.  Standard load/compute/cas retry loop.
	 */
515	ENTRY(atomic_and_32)
516	ALTENTRY(atomic_and_32_nv)
517	ALTENTRY(atomic_and_uint)
518	ALTENTRY(atomic_and_uint_nv)
519	ALTENTRY(atomic_and_ulong)
520	ALTENTRY(atomic_and_ulong_nv)
521	ld	[%o0], %o2		! %o2 = expected old value
5221:
523	and	%o2, %o1, %o3		! %o3 = proposed new value
524	cas	[%o0], %o2, %o3		! commit if memory still holds %o2
525	cmp	%o2, %o3
526	bne,a,pn %icc, 1b		! lost a race: retry with observed value
527	  mov	%o3, %o2
528	retl
529	and	%o2, %o1, %o0		! return new value
530	SET_SIZE(atomic_and_ulong_nv)
531	SET_SIZE(atomic_and_ulong)
532	SET_SIZE(atomic_and_uint_nv)
533	SET_SIZE(atomic_and_uint)
534	SET_SIZE(atomic_and_32_nv)
535	SET_SIZE(atomic_and_32)
536
537	/*
538	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
539	 * separated, you need to also edit the libc sparc platform
540	 * specific mapfile and remove the NODYNSORT attribute
541	 * from atomic_and_64_nv.
542	 */
	/*
	 * Atomically AND a 64-bit value into [%o0]; return the new value.
	 * The operand arrives as two 32-bit halves (%o1 upper, %o2 lower),
	 * merged into %o1 before the casx retry loop; result is returned
	 * split across %o0 (upper) / %o1 (lower).
	 */
543	ENTRY(atomic_and_64)
544	ALTENTRY(atomic_and_64_nv)
545	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
546	srl	%o2, 0, %o2
547	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
548	ldx	[%o0], %o2		! %o2 = expected old value
5491:
550	and	%o2, %o1, %o3		! %o3 = proposed new value
551	casx	[%o0], %o2, %o3		! commit if memory still holds %o2
552	cmp	%o2, %o3
553	bne,a,pn %xcc, 1b		! lost a race: retry with observed value
554	  mov	%o3, %o2
555	and	%o2, %o1, %o1		! return lower 32-bits in %o1
556	retl
557	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
558	SET_SIZE(atomic_and_64_nv)
559	SET_SIZE(atomic_and_64)
560
	/*
	 * Byte compare-and-swap: if the byte at [%o0] equals the low 8
	 * bits of %o1, store the low 8 bits of %o2 there.  Returns the
	 * old byte value in %o0 either way.
	 *
	 * Emulated with a word-wide cas.  Subtlety in the retry logic: a
	 * failed cas may mean either (a) the target byte differs from the
	 * expected value — a genuine mismatch, return the observed byte —
	 * or (b) only *other* bytes of the word changed under us; in case
	 * (b) the byte comparison below still matches, so we retry.
	 */
561	ENTRY(atomic_cas_8)
562	ALTENTRY(atomic_cas_uchar)
563	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
564	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
565	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
566	set	0xff, %o3		! %o3 = mask
567	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
568	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
569	and	%o1, %o3, %o1		! %o1 = single byte value
570	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
571	and	%o2, %o3, %o2		! %o2 = single byte value
572	andn	%o0, 0x3, %o0		! %o0 = word address
573	ld	[%o0], %o4		! read old value
5741:
575	andn	%o4, %o3, %o4		! clear target bits
576	or	%o4, %o2, %o5		! insert the new value
577	or	%o4, %o1, %o4		! insert the comparison value
578	cas	[%o0], %o4, %o5		! %o5 = word actually seen in memory
579	cmp	%o4, %o5		! did we succeed?
580	be,pt	%icc, 2f
581	  and	%o5, %o3, %o4		! isolate the old value (delay slot, always runs)
582	cmp	%o1, %o4		! should we have succeeded?
583	be,a,pt	%icc, 1b		! yes, try again
584	  mov	%o5, %o4		! %o4 = old value
5852:
586	retl
587	srl	%o4, %g1, %o0		! %o0 = old value
588	SET_SIZE(atomic_cas_uchar)
589	SET_SIZE(atomic_cas_8)
590
	/*
	 * Halfword compare-and-swap: if the 16-bit value at [%o0] equals
	 * the low 16 bits of %o1, store the low 16 bits of %o2 there.
	 * Returns the old 16-bit value in %o0 either way.
	 *
	 * Same word-wide cas emulation and retry subtlety as
	 * atomic_cas_8: retry only when the cas failed because the *other*
	 * halfword of the word changed while the target half still
	 * matched the expected value.
	 */
591	ENTRY(atomic_cas_16)
592	ALTENTRY(atomic_cas_ushort)
593	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
594	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
595	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
596	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
597	sethi	%hi(0xffff0000), %o3	! %o3 = mask
598	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
599	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
600	and	%o1, %o3, %o1		! %o1 = single short value
601	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
602	and	%o2, %o3, %o2		! %o2 = single short value
603	andn	%o0, 0x2, %o0		! %o0 = word address
604	! if low-order bit is 1, we will properly get an alignment fault here
605	ld	[%o0], %o4		! read old value
6061:
607	andn	%o4, %o3, %o4		! clear target bits
608	or	%o4, %o2, %o5		! insert the new value
609	or	%o4, %o1, %o4		! insert the comparison value
610	cas	[%o0], %o4, %o5		! %o5 = word actually seen in memory
611	cmp	%o4, %o5		! did we succeed?
612	be,pt	%icc, 2f
613	  and	%o5, %o3, %o4		! isolate the old value (delay slot, always runs)
614	cmp	%o1, %o4		! should we have succeeded?
615	be,a,pt	%icc, 1b		! yes, try again
616	  mov	%o5, %o4		! %o4 = old value
6172:
618	retl
619	srl	%o4, %g1, %o0		! %o0 = old value
620	SET_SIZE(atomic_cas_ushort)
621	SET_SIZE(atomic_cas_16)
622
	/*
	 * 32-bit compare-and-swap: if [%o0] == %o1, store %o2 there.
	 * Returns the old value in %o0.  Maps directly onto the hardware
	 * cas instruction — no loop needed.
	 */
623	ENTRY(atomic_cas_32)
624	ALTENTRY(atomic_cas_uint)
625	ALTENTRY(atomic_cas_ptr)
626	ALTENTRY(atomic_cas_ulong)
627	cas	[%o0], %o1, %o2		! %o2 = old value on return
628	retl
629	mov	%o2, %o0
630	SET_SIZE(atomic_cas_ulong)
631	SET_SIZE(atomic_cas_ptr)
632	SET_SIZE(atomic_cas_uint)
633	SET_SIZE(atomic_cas_32)
634
	/*
	 * 64-bit compare-and-swap.  Compare value arrives split as
	 * %o1 (upper 32) / %o2 (lower 32); new value as %o3 (upper) /
	 * %o4 (lower).  Both are merged to 64 bits, a single casx does
	 * the work, and the old value is returned split across
	 * %o0 (upper) / %o1 (lower).
	 */
635	ENTRY(atomic_cas_64)
636	sllx	%o1, 32, %o1		! cmp's upper 32 in %o1, lower in %o2
637	srl	%o2, 0, %o2		! convert 2 32-bit args into 1 64-bit
638	add	%o1, %o2, %o1
639	sllx	%o3, 32, %o2		! newval upper 32 in %o3, lower in %o4
640	srl	%o4, 0, %o4		! setup %o2 to have newval
641	add	%o2, %o4, %o2
642	casx	[%o0], %o1, %o2		! %o2 = old value on return
643	srl	%o2, 0, %o1		! return lower 32-bits in %o1
644	retl
645	srlx	%o2, 32, %o0		! return upper 32-bits in %o0
646	SET_SIZE(atomic_cas_64)
647
	/*
	 * Atomically store the low 8 bits of %o1 into the byte at [%o0];
	 * return the *old* byte value in %o0.  Emulated with a word-wide
	 * cas retry loop.  On success, cas leaves the prior memory word
	 * in %o5, from which the old byte is extracted.
	 */
648	ENTRY(atomic_swap_8)
649	ALTENTRY(atomic_swap_uchar)
650	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
651	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
652	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
653	set	0xff, %o3		! %o3 = mask
654	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
655	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
656	and	%o1, %o3, %o1		! %o1 = single byte value
657	andn	%o0, 0x3, %o0		! %o0 = word address
658	ld	[%o0], %o2		! read old value
6591:
660	andn	%o2, %o3, %o5		! clear target bits
661	or	%o5, %o1, %o5		! insert the new value
662	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
663	cmp	%o2, %o5
664	bne,a,pn %icc, 1b		! lost a race: retry with observed value
665	  mov	%o5, %o2		! %o2 = old value
666	and	%o5, %o3, %o5		! isolate prior byte from the swapped-out word
667	retl
668	srl	%o5, %g1, %o0		! %o0 = old value
669	SET_SIZE(atomic_swap_uchar)
670	SET_SIZE(atomic_swap_8)
671
	/*
	 * Atomically store the low 16 bits of %o1 into the halfword at
	 * [%o0]; return the *old* 16-bit value in %o0.  Word-wide cas
	 * retry loop, same shape as atomic_swap_8.
	 */
672	ENTRY(atomic_swap_16)
673	ALTENTRY(atomic_swap_ushort)
674	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
675	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
676	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
677	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
678	sethi	%hi(0xffff0000), %o3	! %o3 = mask
679	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
680	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
681	and	%o1, %o3, %o1		! %o1 = single short value
682	andn	%o0, 0x2, %o0		! %o0 = word address
683	! if low-order bit is 1, we will properly get an alignment fault here
684	ld	[%o0], %o2		! read old value
6851:
686	andn	%o2, %o3, %o5		! clear target bits
687	or	%o5, %o1, %o5		! insert the new value
688	cas	[%o0], %o2, %o5		! try to commit; %o5 = word seen in memory
689	cmp	%o2, %o5
690	bne,a,pn %icc, 1b		! lost a race: retry with observed value
691	  mov	%o5, %o2		! %o2 = old value
692	and	%o5, %o3, %o5		! isolate prior halfword from swapped-out word
693	retl
694	srl	%o5, %g1, %o0		! %o0 = old value
695	SET_SIZE(atomic_swap_ushort)
696	SET_SIZE(atomic_swap_16)
697
	/*
	 * Atomically store %o1 into the 32-bit value at [%o0]; return the
	 * old value in %o0.  cas retry loop; on the successful iteration
	 * cas leaves the prior memory value in %o3.
	 */
698	ENTRY(atomic_swap_32)
699	ALTENTRY(atomic_swap_uint)
700	ALTENTRY(atomic_swap_ptr)
701	ALTENTRY(atomic_swap_ulong)
702	ld	[%o0], %o2		! %o2 = expected old value
7031:
704	mov	%o1, %o3		! %o3 = value to store
705	cas	[%o0], %o2, %o3		! commit if memory still holds %o2
706	cmp	%o2, %o3
707	bne,a,pn %icc, 1b		! lost a race: retry with observed value
708	  mov	%o3, %o2
709	retl
710	mov	%o3, %o0		! return the swapped-out (old) value
711	SET_SIZE(atomic_swap_ulong)
712	SET_SIZE(atomic_swap_ptr)
713	SET_SIZE(atomic_swap_uint)
714	SET_SIZE(atomic_swap_32)
715
	/*
	 * Atomically store a 64-bit value (passed split: %o1 upper,
	 * %o2 lower) into [%o0]; return the old value split across
	 * %o0 (upper) / %o1 (lower).  casx retry loop.
	 */
716	ENTRY(atomic_swap_64)
717	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
718	srl	%o2, 0, %o2
719	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
720	ldx	[%o0], %o2		! %o2 = expected old value
7211:
722	mov	%o1, %o3		! %o3 = value to store
723	casx	[%o0], %o2, %o3		! commit if memory still holds %o2
724	cmp	%o2, %o3
725	bne,a,pn %xcc, 1b		! lost a race: retry with observed value
726	  mov	%o3, %o2
727	srl	%o3, 0, %o1		! return lower 32-bits in %o1
728	retl
729	srlx	%o3, 32, %o0		! return upper 32-bits in %o0
730	SET_SIZE(atomic_swap_64)
731
	/*
	 * Atomically set bit %o1 of the long at [%o0], but only if it is
	 * currently clear.  Returns 0 in %o0 on success, -1 if the bit
	 * was already set.  The slln/ldn/casn/%ncc forms are presumably
	 * natural-register-width macros from sys/asm_linkage.h — confirm
	 * there.  Note the ,a (annulled) delay slots: "mov -1, %o0" and
	 * "mov %o4, %o2" execute only when their branch is taken.
	 */
732	ENTRY(atomic_set_long_excl)
733	mov	1, %o3
734	slln	%o3, %o1, %o3		! %o3 = bit mask (1 << %o1)
735	ldn	[%o0], %o2		! %o2 = expected old value
7361:
737	andcc	%o2, %o3, %g0		! test if the bit is set
738	bnz,a,pn %ncc, 2f		! if so, then fail out
739	  mov	-1, %o0
740	or	%o2, %o3, %o4		! set the bit, and try to commit it
741	casn	[%o0], %o2, %o4
742	cmp	%o2, %o4
743	bne,a,pn %ncc, 1b		! failed to commit, try again
744	  mov	%o4, %o2
745	mov	%g0, %o0		! success: return 0
7462:
747	retl
748	nop
749	SET_SIZE(atomic_set_long_excl)
750
	/*
	 * Atomically clear bit %o1 of the long at [%o0], but only if it is
	 * currently set.  Returns 0 in %o0 on success, -1 if the bit was
	 * already clear.  Mirror image of atomic_set_long_excl; the
	 * andncc computes (mask & ~value), which is non-zero exactly when
	 * the target bit is clear.
	 */
751	ENTRY(atomic_clear_long_excl)
752	mov	1, %o3
753	slln	%o3, %o1, %o3		! %o3 = bit mask (1 << %o1)
754	ldn	[%o0], %o2		! %o2 = expected old value
7551:
756	andncc	%o3, %o2, %g0		! test if the bit is clear
757	bnz,a,pn %ncc, 2f		! if so, then fail out
758	  mov	-1, %o0
759	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
760	casn	[%o0], %o2, %o4
761	cmp	%o2, %o4
762	bne,a,pn %ncc, 1b		! failed to commit, try again
763	  mov	%o4, %o2
764	mov	%g0, %o0		! success: return 0
7652:
766	retl
767	nop
768	SET_SIZE(atomic_clear_long_excl)
769
770#if !defined(_KERNEL)
771
772	/*
773	 * Spitfires and Blackbirds have a problem with membars in the
774	 * delay slot (SF_ERRATA_51).  For safety's sake, we assume
775	 * that the whole world needs the workaround.
776	 */
	/*
	 * Acquire-style barrier for lock entry: orders prior stores
	 * against later loads and stores (#StoreLoad|#StoreStore).
	 * The membar is kept out of the delay slot — see the
	 * SF_ERRATA_51 note above.
	 */
777	ENTRY(membar_enter)
778	membar	#StoreLoad|#StoreStore
779	retl
780	nop
781	SET_SIZE(membar_enter)
782
	/*
	 * Release-style barrier for lock exit: orders prior loads and
	 * stores against later stores (#LoadStore|#StoreStore).
	 */
783	ENTRY(membar_exit)
784	membar	#LoadStore|#StoreStore
785	retl
786	nop
787	SET_SIZE(membar_exit)
788
	/*
	 * Producer-side barrier: orders prior stores against later stores
	 * (#StoreStore), so data written before this is visible before
	 * any subsequent flag/pointer store.
	 */
789	ENTRY(membar_producer)
790	membar	#StoreStore
791	retl
792	nop
793	SET_SIZE(membar_producer)
794
	/*
	 * Consumer-side barrier: orders prior loads against later loads
	 * (#LoadLoad), pairing with membar_producer on the writer side.
	 */
795	ENTRY(membar_consumer)
796	membar	#LoadLoad
797	retl
798	nop
799	SET_SIZE(membar_consumer)
800
801#endif	/* !_KERNEL */
802