/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
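	/*
	 * Each ANSI_PRAGMA_WEAK2(old,new,function) line above makes the
	 * legacy name a weak alias for its atomic_* replacement; the
	 * macro expands to roughly ".weak old; old = new" (see
	 * sys/asm_linkage.h for the exact definition).
	 */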
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif

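	/*
	 * The void inc/dec/add/or/and routines below each come down to a
	 * single lock-prefixed read-modify-write instruction; the lock
	 * prefix makes the load-modify-store indivisible with respect to
	 * other CPUs.  A sketch of the intended semantics in C (not the
	 * actual implementation):
	 *
	 *	void
	 *	atomic_inc_32(volatile uint32_t *target)
	 *	{
	 *		(*target)++;	/ done as one locked instruction
	 *	}
	 */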
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	movl	4(%esp), %eax
	lock
	incb	(%eax)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	movl	4(%esp), %eax
	lock
	incw	(%eax)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_ulong)
	movl	4(%esp), %eax
	lock
	incl	(%eax)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

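	/*
	 * The _nv ("new value") variants must return the updated value,
	 * which a lock inc/add cannot report, so they loop on
	 * compare-and-swap instead.  A sketch of the logic in C (the
	 * assembly relies on cmpxchg reloading the accumulator with the
	 * current value on failure, so memory is not re-read explicitly):
	 *
	 *	uint32_t
	 *	atomic_inc_32_nv(volatile uint32_t *target)
	 *	{
	 *		uint32_t old, new;
	 *		do {
	 *			old = *target;
	 *			new = old + 1;
	 *		} while (atomic_cas_32(target, old, new) != old);
	 *		return (new);
	 *	}
	 */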
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

	/*
	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_64_nv.
	 */
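	/*
	 * (NODYNSORT keeps one of two symbols that share an address out
	 * of the sorted dynamic symbol data that DTrace and debuggers
	 * consult, so each address resolves to a single name.)
	 *
	 * The 64-bit routines use cmpxchg8b, which compares %edx:%eax
	 * against the 8-byte memory operand and, if they match, stores
	 * %ecx:%ebx; that dictates the register shuffling, and %ebx and
	 * %edi must be saved because the i386 ABI makes them
	 * callee-saved.
	 */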
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	incl	%ebx		/ %ecx:%ebx = 1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ add in the carry from inc
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	movl	4(%esp), %eax
	lock
	decb	(%eax)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	movl	4(%esp), %eax
	lock
	decw	(%eax)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_ulong)
	movl	4(%esp), %eax
	lock
	decl	(%eax)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	-1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	-1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	-1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	/*
	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_64_nv.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	not	%ecx
	not	%ebx		/ %ecx:%ebx = -1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ add in the carry from the low word
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addb	%cl, (%eax)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addw	%cx, (%eax)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

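	/*
	 * atomic_add_ptr and atomic_add_long can share atomic_add_32's
	 * body because this is an ILP32 environment: int, long, and
	 * pointers are all 32 bits wide.
	 */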
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	orb	%cl, (%eax)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	orw	%cx, (%eax)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	orl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	andb	%cl, (%eax)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	andw	%cx, (%eax)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	andl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	/*
	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_64_nv.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	/*
	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_64_nv.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	orl	%eax, %ebx
	orl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	/*
	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_64_nv.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	andl	%eax, %ebx
	andl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

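	/*
	 * The atomic_cas_* routines return the value found at *target:
	 * equal to cmp on success, the conflicting value on failure.
	 * cmpxchg leaves exactly that in %eax, so nothing needs fixing
	 * up after the instruction.  A sketch of the semantics in C:
	 *
	 *	uint32_t
	 *	atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
	 *	    uint32_t new)
	 *	{
	 *		uint32_t old = *target;
	 *		if (old == cmp)
	 *			*target = new;
	 *		return (old);
	 *	}
	 */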
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	movb	12(%esp), %cl
	lock
	cmpxchgb %cl, (%edx)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	movw	12(%esp), %cx
	lock
	cmpxchgw %cx, (%edx)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	12(%esp), %ecx
	lock
	cmpxchgl %ecx, (%edx)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

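	/*
	 * atomic_cas_64 returns the old value in %edx:%eax, which is
	 * where cmpxchg8b leaves it: on success %edx:%eax already held
	 * the (matching) compare value, and on failure cmpxchg8b loads
	 * the value it actually found into %edx:%eax.
	 */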
	ENTRY(atomic_cas_64)
	pushl	%ebx
	pushl	%esi
	movl	12(%esp), %esi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%esi)
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)

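	/*
	 * The swap routines return the previous contents of *target.
	 * Note that xchg with a memory operand asserts the bus lock
	 * whether or not a lock prefix is present, so the explicit lock
	 * below is redundant but harmless.
	 */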
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	lock
	xchgb	%al, (%edx)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	lock
	xchgw	%ax, (%edx)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	lock
	xchgl	%eax, (%edx)
	ret
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

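	/*
	 * There is no 64-bit xchg on i386, so atomic_swap_64 loops on
	 * cmpxchg8b with the desired new value in %ecx:%ebx; each failed
	 * attempt refreshes %edx:%eax with the current contents, so the
	 * loop simply retries until the store sticks.
	 */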
	ENTRY(atomic_swap_64)
	pushl	%esi
	pushl	%ebx
	movl	12(%esp), %esi
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx
	movl	(%esi), %eax
	movl	4(%esi), %edx	/ %edx:%eax = old value
1:
	lock
	cmpxchg8b (%esi)
	jne	1b
	popl	%ebx
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)

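	/*
	 * atomic_set_long_excl and atomic_clear_long_excl atomically set
	 * or clear one bit and return 0 only if they changed it; bts/btr
	 * report the bit's previous value in the carry flag.  A sketch
	 * of the set case in C:
	 *
	 *	int
	 *	atomic_set_long_excl(volatile ulong_t *target, uint_t bit)
	 *	{
	 *		ulong_t mask = 1UL << bit;
	 *		if (*target & mask)
	 *			return (-1);	/ already set
	 *		*target |= mask;	/ done atomically
	 *		return (0);
	 *	}
	 */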
	ENTRY(atomic_set_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btsl	%ecx, (%edx)
	jnc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btrl	%ecx, (%edx)
	jc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * NOTE: membar_enter, membar_exit, membar_producer, and
	 * membar_consumer are all identical routines. We define them
	 * separately, instead of using ALTENTRY definitions to alias them
	 * together, so that DTrace and debuggers will see a unique address
	 * for them, allowing more accurate tracing.
	 */

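	/*
	 * A locked read-modify-write of the word at the top of the stack
	 * is a cheap full barrier on x86: the lock prefix orders all
	 * earlier loads and stores before all later ones, without the
	 * need for an mfence (which pre-SSE2 CPUs lack).
	 */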
	ENTRY(membar_enter)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */