/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif

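	/*
	 * The routines below that do not return a value are simply a
	 * single lock-prefixed read-modify-write instruction (inc, dec,
	 * add, or, and) applied to the target location.
	 */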
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	movl	4(%esp), %eax
	lock
	incb	(%eax)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	movl	4(%esp), %eax
	lock
	incw	(%eax)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_ulong)
	movl	4(%esp), %eax
	lock
	incl	(%eax)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

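	/*
	 * The *_nv variants must return the new value, so they are built
	 * as a compare-and-swap loop: load the old value, compute the new
	 * value, and retry the lock cmpxchg until it succeeds.  A failed
	 * cmpxchg reloads the current value into %al/%ax/%eax, so the
	 * loop simply recomputes and tries again.
	 */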
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

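	/*
	 * 64-bit atomics on 32-bit x86 are built on cmpxchg8b, which
	 * compares %edx:%eax with the target and, if they are equal,
	 * stores %ecx:%ebx; on failure it loads the current value into
	 * %edx:%eax, so the loop just recomputes the new value and
	 * retries.  The plain and _nv entry points share the same code,
	 * since the new value is computed in registers either way.
	 */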
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	incl	%ebx		/ %ecx:%ebx = 1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ add in the carry from inc
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	movl	4(%esp), %eax
	lock
	decb	(%eax)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	movl	4(%esp), %eax
	lock
	decw	(%eax)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_ulong)
	movl	4(%esp), %eax
	lock
	decl	(%eax)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	-1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	-1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	-1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	not	%ecx
	not	%ebx		/ %ecx:%ebx = -1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ add in the carry
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addb	%cl, (%eax)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addw	%cx, (%eax)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	orb	%cl, (%eax)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	orw	%cx, (%eax)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	orl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	andb	%cl, (%eax)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	andw	%cx, (%eax)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	andl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = bits to set
	orl	%eax, %ebx
	orl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = mask
	andl	%eax, %ebx
	andl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	movb	12(%esp), %cl
	lock
	cmpxchgb %cl, (%edx)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	movw	12(%esp), %cx
	lock
	cmpxchgw %cx, (%edx)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	12(%esp), %ecx
	lock
	cmpxchgl %ecx, (%edx)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

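	/*
	 * atomic_cas_64 loads the comparison value into %edx:%eax and the
	 * new value into %ecx:%ebx, as cmpxchg8b requires; the value that
	 * was in memory is returned in %edx:%eax whether or not the
	 * exchange took place.
	 */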
	ENTRY(atomic_cas_64)
	pushl	%ebx
	pushl	%esi
	movl	12(%esp), %esi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%esi)
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)

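	/*
	 * xchg with a memory operand is locked implicitly, so the lock
	 * prefixes below are redundant, but harmless.
	 */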
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	lock
	xchgb	%al, (%edx)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	lock
	xchgw	%ax, (%edx)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	lock
	xchgl	%eax, (%edx)
	ret
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	pushl	%esi
	pushl	%ebx
	movl	12(%esp), %esi
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx
	movl	(%esi), %eax
	movl	4(%esi), %edx	/ %edx:%eax = old value
1:
	lock
	cmpxchg8b (%esi)
	jne	1b
	popl	%ebx
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)

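	/*
	 * atomic_set_long_excl and atomic_clear_long_excl return 0 if the
	 * bit was changed and -1 if it already had the requested value,
	 * based on the carry flag produced by lock bts/btr (the carry
	 * flag holds the bit's previous value).
	 */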
	ENTRY(atomic_set_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btsl	%ecx, (%edx)
	jnc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btrl	%ecx, (%edx)
	jc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

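	/*
	 * In userland all four memory barrier entry points alias the same
	 * code: a lock-prefixed operation on the top of the stack, which
	 * acts as a full memory barrier on x86.
	 */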
	ENTRY(membar_enter)
	ALTENTRY(membar_exit)
	ALTENTRY(membar_producer)
	ALTENTRY(membar_consumer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_consumer)
	SET_SIZE(membar_producer)
	SET_SIZE(membar_exit)
	SET_SIZE(membar_enter)

#endif	/* !_KERNEL */
