/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.file	"atomic.s"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#endif

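	/*
	 * Each void routine below takes its target address at 4(%esp)
	 * (and, for the add/or/and forms, its operand at 8(%esp)) and
	 * applies a single read-modify-write instruction under the LOCK
	 * prefix, which makes the update atomic with respect to the
	 * other CPUs.
	 */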
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	movl	4(%esp), %eax
	lock
	incb	(%eax)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	movl	4(%esp), %eax
	lock
	incw	(%eax)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_ulong)
	movl	4(%esp), %eax
	lock
	incl	(%eax)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

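	/*
	 * The *_nv ("new value") variants must return the updated value,
	 * so they are built on a compare-and-swap loop: fetch the old
	 * value, compute old + 1 into a scratch register with lea (which
	 * leaves the copy of the old value in %eax intact for cmpxchg's
	 * comparison), and retry if another CPU modified the target in
	 * the meantime.  On failure cmpxchg refreshes %eax with the value
	 * it found, so the loop can branch straight back.
	 */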
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

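	/*
	 * There is no 64-bit lock-prefixed arithmetic on i386, so the
	 * 64-bit routines loop over cmpxchg8b, which compares %edx:%eax
	 * against the 8-byte memory operand and stores %ecx:%ebx if they
	 * match.  %ebx and %edi are callee-saved in the i386 ABI and must
	 * be preserved, so the incoming arguments sit 8 bytes higher on
	 * the stack (target address at 12(%esp)).
	 */
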
	/*
	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
	 * separated, you need to also edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_64_nv.
	 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	incl	%ebx		/ %ecx:%ebx = 1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ propagate the carry from the low word
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	movl	4(%esp), %eax
	lock
	decb	(%eax)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	movl	4(%esp), %eax
	lock
	decw	(%eax)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_ulong)
	movl	4(%esp), %eax
	lock
	decl	(%eax)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	leal	-1(%eax), %ecx	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	leal	-1(%eax), %ecx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	leal	-1(%eax), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	/*
	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_64_nv.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	not	%ecx
	not	%ebx		/ %ecx:%ebx = -1
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ propagate the carry from the low word
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addb	%cl, (%eax)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addw	%cx, (%eax)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	addl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	orb	%cl, (%eax)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	orw	%cx, (%eax)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	orl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	movl	4(%esp), %eax
	movb	8(%esp), %cl
	lock
	andb	%cl, (%eax)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	movl	4(%esp), %eax
	movw	8(%esp), %cx
	lock
	andw	%cx, (%eax)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_ulong)
	movl	4(%esp), %eax
	movl	8(%esp), %ecx
	lock
	andl	%ecx, (%eax)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

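	/*
	 * In the arithmetic and logical *_nv loops below, the delta is
	 * re-fetched from 8(%esp) on every pass, since the scratch
	 * register holding the new value is clobbered by each attempt.
	 */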
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	addl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	/*
	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_64_nv.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	addl	%eax, %ebx
	adcl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	/*
	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_64_nv.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	orl	%eax, %ebx
	orl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = delta
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	/*
	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
	 * separated, it is important to edit the libc i386 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_64_nv.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi	/ %edi = target address
	movl	(%edi), %eax
	movl	4(%edi), %edx	/ %edx:%eax = old value
1:
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = delta
	andl	%eax, %ebx
	andl	%edx, %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%edi)	/ try to stick it in
	jne	1b
	movl	%ebx, %eax
	movl	%ecx, %edx	/ return new value
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

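	/*
	 * The atomic_cas_* routines take the target address, the expected
	 * old value, and the new value.  cmpxchg leaves whatever it found
	 * at the target in %eax (%edx:%eax for the 64-bit form), and that
	 * is the return value; the caller learns whether the swap took
	 * place by comparing it with the expected value, e.g. (a sketch
	 * of typical use from C):
	 *
	 *	if (atomic_cas_32(&target, exp, new) == exp)
	 *		... swap succeeded ...
	 */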
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	movb	12(%esp), %cl
	lock
	cmpxchgb %cl, (%edx)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	movw	12(%esp), %cx
	lock
	cmpxchgw %cx, (%edx)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	12(%esp), %ecx
	lock
	cmpxchgl %ecx, (%edx)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	pushl	%ebx
	pushl	%esi
	movl	12(%esp), %esi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%esi)
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)

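	/*
	 * xchg with a memory operand asserts the LOCK signal whether or
	 * not a lock prefix is present, so the swap routines are atomic;
	 * the explicit prefix here is redundant but harmless.
	 */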
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movl	4(%esp), %edx
	movzbl	8(%esp), %eax
	lock
	xchgb	%al, (%edx)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movl	4(%esp), %edx
	movzwl	8(%esp), %eax
	lock
	xchgw	%ax, (%edx)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	lock
	xchgl	%eax, (%edx)
	ret
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

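	/*
	 * There is no 64-bit xchg on i386, so atomic_swap_64 loops over
	 * cmpxchg8b with the new value in %ecx:%ebx.  A failed cmpxchg8b
	 * refreshes %edx:%eax with the current contents of the target, so
	 * when the loop finally succeeds %edx:%eax holds the old value to
	 * be returned.
	 */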
	ENTRY(atomic_swap_64)
	pushl	%esi
	pushl	%ebx
	movl	12(%esp), %esi
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx
	movl	(%esi), %eax
	movl	4(%esi), %edx	/ %edx:%eax = old value
1:
	lock
	cmpxchg8b (%esi)
	jne	1b
	popl	%ebx
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)

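	/*
	 * bts (btr) copies the addressed bit into the carry flag and then
	 * sets (clears) it, so the carry tells us whether the bit was
	 * already in the requested state.  These routines return 0 on
	 * success and -1 if the bit was already set (cleared).
	 */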
	ENTRY(atomic_set_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btsl	%ecx, (%edx)
	jnc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax
	lock
	btrl	%ecx, (%edx)
	jc	1f
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * NOTE: membar_enter, membar_exit, membar_producer, and
	 * membar_consumer are all identical routines. We define them
	 * separately, instead of using ALTENTRY definitions to alias them
	 * together, so that DTrace and debuggers will see a unique address
	 * for them, allowing more accurate tracing.
	 */

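	/*
	 * A lock-prefixed instruction acts as a full memory barrier on
	 * x86, and lock; xorl $0, (%esp) is a convenient no-op target:
	 * the word at the top of the stack is always present, writable,
	 * and almost certainly in the cache.  Unlike mfence, it also
	 * works on pre-SSE2 processors.
	 */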
	ENTRY(membar_enter)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */