/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#endif

	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	movl	4(%esp), %eax
	lock
	incb	(%eax)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	movl	4(%esp), %eax
	lock
	incw	(%eax)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_ulong)
	movl	4(%esp), %eax
	lock
	incl	(%eax)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)
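
	/*
	 * The plain inc/dec/add/or/and routines in this file are single
	 * LOCK-prefixed read-modify-write instructions; the LOCK prefix
	 * is what makes each update atomic across processors.  Roughly
	 * equivalent C, minus the atomicity (a sketch of the
	 * atomic_ops(3C) interface, not authoritative):
	 *
	 *	void
	 *	atomic_inc_32(volatile uint32_t *target)
	 *	{
	 *		(*target)++;
	 *	}
	 */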

	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)
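
	/*
	 * The *_nv ("new value") routines in this file cannot use a
	 * single LOCKed instruction because they must return the updated
	 * value.  Each is a compare-and-swap loop: compute the new value
	 * from a snapshot of the old one, then let cmpxchg store it only
	 * if the target still holds the snapshot.  On failure, cmpxchg
	 * reloads the current value into %eax, so the loop simply
	 * recomputes and retries.  Roughly equivalent C (sketch only):
	 *
	 *	uint32_t
	 *	atomic_inc_32_nv(volatile uint32_t *target)
	 *	{
	 *		uint32_t old, new;
	 *		do {
	 *			old = *target;
	 *			new = old + 1;
	 *		} while (atomic_cas_32(target, old, new) != old);
	 *		return (new);
	 *	}
	 */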

	/*
	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_64_nv.
	 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	incl	%ebx		/ %ecx:%ebx = 1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ add in the carry from the low word
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)
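
	/*
	 * i386 has no 64-bit LOCKed arithmetic instruction, so the 64-bit
	 * routines loop on cmpxchg8b instead: it compares %edx:%eax with
	 * the target and, on a match, stores %ecx:%ebx (reloading
	 * %edx:%eax with the current value on a mismatch).  %ebx and %edi
	 * are callee-saved registers in this calling convention, hence
	 * the push/pop pairs.
	 */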

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	movl	4(%esp), %eax
	lock
	decb	(%eax)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	movl	4(%esp), %eax
	lock
	decw	(%eax)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_ulong)
	movl	4(%esp), %eax
	lock
	decl	(%eax)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	-1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	-1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	-1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	/*
	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_64_nv.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	not	%ecx
	not	%ebx		/ %ecx:%ebx = -1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ add in the carry from the low word
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addb	%cl, (%eax)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addw	%cx, (%eax)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)
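
	/*
	 * The add routines take a signed delta, so callers subtract by
	 * passing a negative value; atomic_add_ptr and atomic_add_long
	 * are alternate entry points for atomic_add_32 because this is
	 * an ILP32 environment.  Hypothetical usage (the variable name
	 * is illustrative):
	 *
	 *	atomic_add_32(&count, -5);
	 */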

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	orb	%cl, (%eax)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	orw	%cx, (%eax)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	orl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	andb	%cl, (%eax)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	andw	%cx, (%eax)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	andl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	/*
	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_64_nv.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	/*
	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_64_nv.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = bits to set
	orl	%eax, %ebx
	orl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	/*
	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_64_nv.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = mask
	andl	%eax, %ebx
	andl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	movb	12(%esp), %cl
	lock
	cmpxchgb %cl, (%edx)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	movw	12(%esp), %cx
	lock
	cmpxchgw %cx, (%edx)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	12(%esp), %ecx
	lock
	cmpxchgl %ecx, (%edx)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)
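
	/*
	 * atomic_cas_* returns the value the target held before the
	 * operation, so a caller detects success by comparing the return
	 * value with the expected old value.  Hypothetical usage in a
	 * retry loop (sketch; names are caller-chosen):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = compute(old);
	 *	} while (atomic_cas_32(ptr, old, new) != old);
	 */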

	ENTRY(atomic_cas_64)
	pushl	%ebx
	pushl	%esi
	movl	12(%esp), %esi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%esi)
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)
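
	/*
	 * For atomic_cas_64 the arguments line up directly with
	 * cmpxchg8b: the 64-bit compare value goes in %edx:%eax and the
	 * 64-bit new value in %ecx:%ebx.  No retry loop is needed, and
	 * the instruction leaves the old value in %edx:%eax, which is
	 * exactly the 64-bit return register pair.
	 */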

	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	lock
	xchgb	%al, (%edx)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	lock
	xchgw	%ax, (%edx)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	lock
	xchgl	%eax, (%edx)
	ret
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
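
	/*
	 * Strictly speaking, the lock prefix on the xchg instructions
	 * above is redundant, since xchg with a memory operand always
	 * asserts the bus lock on x86; it is harmless and makes the
	 * atomicity explicit.
	 */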

	ENTRY(atomic_swap_64)
	pushl	%esi
	pushl	%ebx
	movl	12(%esp), %esi
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx
	movl	(%esi), %eax
	movl	4(%esi), %edx	/ %edx:%eax = old value
1:
	lock
	cmpxchg8b (%esi)
	jne	1b
	popl	%ebx
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)
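
	/*
	 * There is no 64-bit xchg on i386, so atomic_swap_64 loops on
	 * cmpxchg8b using the current value as the comparand until the
	 * store of the new value succeeds.  Roughly equivalent C
	 * (sketch only):
	 *
	 *	uint64_t
	 *	atomic_swap_64(volatile uint64_t *target, uint64_t new)
	 *	{
	 *		uint64_t old;
	 *		do {
	 *			old = *target;
	 *		} while (atomic_cas_64(target, old, new) != old);
	 *		return (old);
	 *	}
	 */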

	ENTRY(atomic_set_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btsl	%ecx, (%edx)
	jnc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)
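
	/*
	 * atomic_set_long_excl atomically sets bit 'bit' of *target and
	 * returns 0 if the bit was previously clear, or -1 if it was
	 * already set; atomic_clear_long_excl below is the mirror image.
	 * Hypothetical usage as a one-bit spin lock (sketch; LOCK_BIT is
	 * illustrative):
	 *
	 *	while (atomic_set_long_excl(&flags, LOCK_BIT) != 0)
	 *		continue;
	 */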

	ENTRY(atomic_clear_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btrl	%ecx, (%edx)
	jc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * NOTE: membar_enter, membar_exit, membar_producer, and
	 * membar_consumer are all identical routines. We define them
	 * separately, instead of using ALTENTRY definitions to alias them
	 * together, so that DTrace and debuggers will see a unique address
	 * for them, allowing more accurate tracing.
	 */

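	/*
	 * A LOCKed instruction is a full memory barrier on x86, so each
	 * routine below does a LOCKed no-op read-modify-write of the word
	 * at the top of the stack ("xorl $0" leaves it unchanged).  This
	 * idiom works on every i386-class CPU, unlike mfence, which
	 * requires SSE2 support.
	 */
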
	ENTRY(membar_enter)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */