/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
#endif

	/*
	 * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_8_nv.
	 */
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	ba	add_8
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)

	/*
	 * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_8_nv.
	 */
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	ba	add_8
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)

	/*
	 * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_8_nv.
	 */
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char)
	ALTENTRY(atomic_add_char_nv)
add_8:
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8_nv)
	SET_SIZE(atomic_add_8)
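
	/*
	 * For reference: a hedged C sketch of the add_8 logic above --
	 * an 8-bit atomic add built from a 32-bit cas on the enclosing
	 * word.  The helper name and locals are illustrative, not part
	 * of this library; the shift follows SPARC's big-endian byte
	 * order.
	 *
	 *	uint8_t
	 *	sketch_atomic_add_8_nv(volatile uint8_t *p, int8_t delta)
	 *	{
	 *		volatile uint32_t *wp =
	 *		    (volatile uint32_t *)((uintptr_t)p & ~(uintptr_t)3);
	 *		uint32_t shift = (((uintptr_t)p & 3) ^ 3) * 8;
	 *		uint32_t mask = 0xffu << shift;
	 *		uint32_t val = ((uint32_t)(uint8_t)delta << shift) & mask;
	 *		uint32_t old, new;
	 *		do {
	 *			old = *wp;
	 *			new = (old & ~mask) | ((old + val) & mask);
	 *		} while (atomic_cas_32(wp, old, new) != old);
	 *		return ((uint8_t)(((old + val) & mask) >> shift));
	 *	}
	 */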

	/*
	 * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_16_nv.
	 */
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	ba	add_16
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)

	/*
	 * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_16_nv.
	 */
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	ba	add_16
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)

	/*
	 * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_16_nv.
	 */
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
add_16:
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	add	%o1, %o2, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o1, %o2, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)

	/*
	 * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_32_nv.
	 */
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	ba	add_32
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)

	/*
	 * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_32_nv.
	 */
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	ba	add_32
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)

	/*
	 * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_32_nv.
	 */
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
add_32:
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o3		! new value = old value + delta
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3		! did the cas succeed?
	bne,a,pn %icc, 1b
	  mov	%o3, %o2		! %o2 = old value
	retl
	add	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)
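
	/*
	 * The loop above is the classic cas retry idiom.  A minimal C
	 * sketch of the same logic, expressed with this library's own
	 * atomic_cas_32 (the helper name is illustrative):
	 *
	 *	uint32_t
	 *	sketch_atomic_add_32_nv(volatile uint32_t *p, int32_t delta)
	 *	{
	 *		uint32_t old, new;
	 *		do {
	 *			old = *p;
	 *			new = old + (uint32_t)delta;
	 *		} while (atomic_cas_32(p, old, new) != old);
	 *		return (new);
	 *	}
	 *
	 * The assembly avoids the re-read by reusing the value a
	 * failed cas returns as the next "old".
	 */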

	/*
	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_64_nv.
	 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	ba	add_64
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	/*
	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_64_nv.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	ba	add_64
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	/*
	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_64_nv.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
add_64:
	ldx	[%o0], %o2
1:
	add	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	add	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)
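
	/*
	 * The sllx/srl/add prologue and the split return above exist
	 * because this is the 32-bit sparc ABI: a 64-bit argument
	 * arrives as two 32-bit halves (%o1 = upper, %o2 = lower) and
	 * a 64-bit result is returned in %o0 (upper) and %o1 (lower).
	 * Conceptually:
	 *
	 *	uint64_t delta = ((uint64_t)hi << 32) | (uint32_t)lo;
	 *
	 * The `srl %o2, 0, %o2` clears the upper half of the register
	 * so no stale high bits leak into the 64-bit add.
	 */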

	/*
	 * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_8_nv.
	 */
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)

	/*
	 * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_16_nv.
	 */
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5		! or in the new value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)

	/*
	 * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_32_nv.
	 */
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
	ld	[%o0], %o2
1:
	or	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)

	/*
	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_64_nv.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	or	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	or	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	/*
	 * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_8_nv.
	 */
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in other bytes
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)
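
	/*
	 * Note the `orn` above: unlike the add case, or/and need no
	 * separate merge step in the loop.  For or, the operand is
	 * widened with zeros outside the target byte; for and, `orn`
	 * widens it with ones, so the other bytes pass through the
	 * 32-bit operation unchanged.  A hedged C sketch, with val,
	 * shift and mask as in the earlier sketches:
	 *
	 *	wide = (val << shift) | ~mask;	// and: ones elsewhere
	 *	new = old & wide;
	 *
	 *	wide = (val << shift) & mask;	// or: zeros elsewhere
	 *	new = old | wide;
	 */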

	/*
	 * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_16_nv.
	 */
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in the other half
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)

	/*
	 * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_32_nv.
	 */
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
	ld	[%o0], %o2
1:
	and	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)

	/*
	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
	 * separated, you need to also edit the libc sparc platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_64_nv.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	and	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	and	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)
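
	/*
	 * A hedged C sketch of the retry logic above.  The 32-bit cas
	 * can fail because some *other* byte of the word changed; that
	 * is not a failure of the 8-bit cas, so the code distinguishes
	 * a genuine mismatch from a spurious failure and retries the
	 * latter.  Locals and the helper name are illustrative.
	 *
	 *	uint8_t
	 *	sketch_atomic_cas_8(volatile uint8_t *p, uint8_t cmp, uint8_t nv)
	 *	{
	 *		volatile uint32_t *wp =
	 *		    (volatile uint32_t *)((uintptr_t)p & ~(uintptr_t)3);
	 *		uint32_t shift = (((uintptr_t)p & 3) ^ 3) * 8;
	 *		uint32_t mask = 0xffu << shift;
	 *		uint32_t old = *wp;
	 *		for (;;) {
	 *			uint32_t exp = (old & ~mask) | ((uint32_t)cmp << shift);
	 *			uint32_t des = (old & ~mask) | ((uint32_t)nv << shift);
	 *			uint32_t got = atomic_cas_32(wp, exp, des);
	 *			if (got == exp)
	 *				return (cmp);		// succeeded
	 *			if (((got & mask) >> shift) != cmp)
	 *				return ((got & mask) >> shift); // real mismatch
	 *			old = got;			// spurious, retry
	 *		}
	 *	}
	 */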

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	cas	[%o0], %o1, %o2
	retl
	mov	%o2, %o0		! return old value
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	sllx	%o1, 32, %o1		! cmp's upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2		! convert 2 32-bit args into 1 64-bit
	add	%o1, %o2, %o1
	sllx	%o3, 32, %o2		! newval upper 32 in %o3, lower in %o4
	srl	%o4, 0, %o4		! setup %o2 to have newval
	add	%o2, %o4, %o2
	casx	[%o0], %o1, %o2
	srl	%o2, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o2, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_cas_64)

	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	ld	[%o0], %o2		! read old value
1:
	mov	%o1, %o3		! %o3 = new value
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3		! did the cas succeed?
	bne,a,pn %icc, 1b
	  mov	%o3, %o2		! %o2 = old value
	retl
	mov	%o3, %o0		! return old value
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
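
	/*
	 * swap is exchange: keep trying to cas the new value in until
	 * the cas sticks, then return what was there.  A minimal C
	 * sketch (helper name illustrative):
	 *
	 *	uint32_t
	 *	sketch_atomic_swap_32(volatile uint32_t *p, uint32_t nv)
	 *	{
	 *		uint32_t old;
	 *		do {
	 *			old = *p;
	 *		} while (atomic_cas_32(p, old, nv) != old);
	 *		return (old);
	 *	}
	 */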

	ENTRY(atomic_swap_64)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	mov	%o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	srl	%o3, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o3, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_swap_64)

	ENTRY(atomic_set_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andcc	%o2, %o3, %g0		! test if the bit is set
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	or	%o2, %o3, %o4		! set the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_set_long_excl)
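
	/*
	 * atomic_set_long_excl atomically sets bit `bit` in *p,
	 * failing if it was already set: it returns 0 on success and
	 * -1 if the bit was set.  A hedged C sketch (helper name
	 * illustrative):
	 *
	 *	int
	 *	sketch_set_long_excl(volatile ulong_t *p, uint_t bit)
	 *	{
	 *		ulong_t m = 1UL << bit;
	 *		ulong_t old;
	 *		do {
	 *			old = *p;
	 *			if (old & m)
	 *				return (-1);	// already set
	 *		} while (atomic_cas_ulong(p, old, old | m) != old);
	 *		return (0);
	 *	}
	 */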

	ENTRY(atomic_clear_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andncc	%o3, %o2, %g0		! test if the bit is clear
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * Spitfires and Blackbirds have a problem with membars in the
	 * delay slot (SF_ERRATA_51).  For safety's sake, we assume
	 * that the whole world needs the workaround.
	 */
	ENTRY(membar_enter)
	membar	#StoreLoad|#StoreStore
	retl
	nop
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	membar	#LoadStore|#StoreStore
	retl
	nop
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	membar	#StoreStore
	retl
	nop
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	membar	#LoadLoad
	retl
	nop
	SET_SIZE(membar_consumer)
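
	/*
	 * Rough intuition for the fences above (approximate; SPARC
	 * membar semantics do not map exactly onto any single C
	 * memory model): membar_exit orders prior loads and stores
	 * before a later store (release-like), membar_enter orders a
	 * prior store before later loads and stores (acquire-like,
	 * for lock entry), and the producer/consumer pair brackets a
	 * flag handoff, sketched here with illustrative names:
	 *
	 *	data = v;		// producer
	 *	membar_producer();	// order data before flag
	 *	ready = 1;
	 *
	 *	while (ready == 0)	// consumer
	 *		;
	 *	membar_consumer();	// order flag before data
	 *	use(data);
	 */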

#endif	/* !_KERNEL */