/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif

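	/*
	 * A few conventions used throughout this file: the inc/dec
	 * entry points branch to a common add routine with +1 or -1
	 * loaded into %o1 in the branch's delay slot, every routine
	 * retries its cas until the compare-and-swap succeeds, and
	 * instructions in delay slots are indented an extra two
	 * spaces.  The ",a" (annul) completion on the retry branches
	 * means the mov in the delay slot executes only when the
	 * branch is taken, i.e. only when the cas failed.
	 */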
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	ba	add_8
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	ba	add_8
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)

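	/*
	 * SPARC has no byte-wide compare-and-swap, so the 8-bit
	 * routines operate on the aligned 32-bit word containing the
	 * target byte.  Memory is big-endian, so the byte's bit offset
	 * within the word is computed right-to-left (the byte at
	 * offset 0 occupies bits 31:24), and the cas loop modifies
	 * only the masked byte.
	 */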
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char)
	ALTENTRY(atomic_add_char_nv)
add_8:
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8_nv)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	ba	add_16
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	ba	add_16
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)

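	/*
	 * As with the 8-bit routines, the 16-bit operations are
	 * emulated with a 32-bit cas on the containing word.  The mask
	 * is formed by shifting 0xffff0000 right by the left-to-right
	 * bit offset (0 or 16); an odd address is left with its
	 * low-order bit intact so that the ld below takes an
	 * alignment fault.
	 */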
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
add_16:
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	add	%o1, %o2, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o1, %o2, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ba	add_32
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ba	add_32
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)

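	/*
	 * The word-sized routines need no masking: read the old value,
	 * compute old + delta, and cas it in, retrying with the value
	 * the cas returned if another thread got there first.  The
	 * plain and _nv variants share one body, since the new value
	 * is recomputed into %o0 on the way out regardless.
	 */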
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
add_32:
	ld	[%o0], %o2
1:
	add	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	ba	add_64
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	ba	add_64
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

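	/*
	 * This is the 64-bit (LP64) environment, where longs and
	 * pointers are 64 bits wide, so atomic_add_long and
	 * atomic_add_ptr are aliases for atomic_add_64, which uses
	 * casx and the %xcc condition codes instead of cas/%icc.
	 */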
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
add_64:
	ldx	[%o0], %o2
1:
	add	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

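	/*
	 * The logical operations don't need a "clear target bits" step
	 * inside the loop: for "or", %o1 has already been masked down
	 * to the target byte, so or-ing it in leaves the other three
	 * bytes of the word untouched.
	 */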
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5		! or in the new value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ld	[%o0], %o2
1:
	or	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
	ldx	[%o0], %o2
1:
	or	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

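	/*
	 * For "and", the operand is prepared with orn instead of and:
	 * every bit outside the target byte (or halfword) becomes 1,
	 * so the and inside the loop passes the rest of the word
	 * through unchanged.
	 */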
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in other bytes
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in the other half
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ld	[%o0], %o2
1:
	and	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
	ldx	[%o0], %o2
1:
	and	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

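	/*
	 * Sub-word compare-and-swap is the subtle case: the word-wide
	 * cas can fail because some other byte of the word changed,
	 * even though the target byte still holds the comparison
	 * value.  After a failed cas the target byte of the value
	 * actually in memory is isolated; if it still matches %o1 the
	 * failure was spurious and the operation is retried, otherwise
	 * the old value is returned.
	 */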
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

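	/*
	 * The 32- and 64-bit cases map directly onto the cas and casx
	 * instructions; the old value comes back in %o2 and is moved
	 * into %o0 in the delay slot of the retl.
	 */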
	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	cas	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	casx	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_64)

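	/*
	 * Swap is a cas loop whose replacement value ignores the old
	 * one; the sub-word variants extract the old byte or halfword
	 * from the word returned by the successful cas.
	 */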
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ld	[%o0], %o2
1:
	mov	%o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	  mov	%o3, %o2
	retl
	mov	%o3, %o0
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	ldx	[%o0], %o2
1:
	mov	%o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	  mov	%o3, %o2
	retl
	mov	%o3, %o0
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_64)

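	/*
	 * atomic_set_long_excl and atomic_clear_long_excl atomically
	 * set or clear bit %o1 of the long at %o0, failing (returning
	 * -1 rather than 0) if the bit is already in the requested
	 * state.  The slln/ldn/casn mnemonics are the natural
	 * register-width macros, which resolve to the 64-bit forms
	 * here.
	 */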
	ENTRY(atomic_set_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andcc	%o2, %o3, %g0		! test if the bit is set
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	or	%o2, %o3, %o4		! set the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andncc	%o3, %o2, %g0		! test if the bit is clear
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * Spitfires and Blackbirds have a problem with membars in the
	 * delay slot (SF_ERRATA_51).  For safety's sake, we assume
	 * that the whole world needs the workaround.
	 */
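	/*
	 * membar_enter orders prior stores before subsequent loads and
	 * stores (as needed after acquiring a lock); membar_exit
	 * orders prior loads and stores before subsequent stores (as
	 * needed before releasing one).  These are built for userland
	 * only; the kernel supplies its own copies.
	 */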
	ENTRY(membar_enter)
	membar	#StoreLoad|#StoreStore
	retl
	nop
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	membar	#LoadStore|#StoreStore
	retl
	nop
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	membar	#StoreStore
	retl
	nop
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	membar	#LoadLoad
	retl
	nop
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */