/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif

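/*
 * Increment routines, one per operand size.  Roughly, per atomic_ops(3C):
 *	void atomic_inc_8(volatile uint8_t *target);
 * and likewise for the 16-, 32- and 64-bit widths and their
 * uchar/ushort/uint/ulong aliases.  Since no value is returned, a single
 * locked inc on the target is sufficient.
 */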
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	lock
	incb	(%rdi)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	lock
	incw	(%rdi)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	lock
	incl	(%rdi)
	ret
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_ulong)
	lock
	incq	(%rdi)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_64)

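/*
 * The *_nv ("new value") increment variants return the updated value.
 * Each one reads the old value, computes old + 1 into %rcx, and retries
 * a locked cmpxchg until no other CPU has changed the target in the
 * meantime; the new value is then returned in %eax/%rax.
 */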
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	leaq	1(%rax), %rcx	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	leaq	1(%rax), %rcx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	leaq	1(%rax), %rcx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

	ENTRY(atomic_inc_64_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	leaq	1(%rax), %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_64_nv)

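/*
 * Decrement routines.  These mirror the increment routines above, and
 * the *_nv forms likewise return the decremented value.
 */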
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	lock
	decb	(%rdi)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	lock
	decw	(%rdi)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	lock
	decl	(%rdi)
	ret
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_ulong)
	lock
	decq	(%rdi)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	leaq	-1(%rax), %rcx	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	leaq	-1(%rax), %rcx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	leaq	-1(%rax), %rcx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	ENTRY(atomic_dec_64_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	leaq	-1(%rax), %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_64_nv)

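/*
 * Add routines.  The second argument (%rsi) is the signed delta; roughly,
 * per atomic_ops(3C):
 *	void atomic_add_32(volatile uint32_t *target, int32_t delta);
 * atomic_add_64 also serves as atomic_add_long and atomic_add_ptr.
 */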
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	lock
	addb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	lock
	addw	%si, (%rdi)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	lock
	addl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	lock
	addq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_64)

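/*
 * Bitwise OR and AND routines: atomically set or clear the bits given in
 * the second argument (%rsi).  No value is returned, so a single locked
 * or/and on the target is all that is needed.
 */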
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	lock
	orb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	lock
	orw	%si, (%rdi)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	lock
	orl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_ulong)
	lock
	orq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	lock
	andb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	lock
	andw	%si, (%rdi)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	lock
	andl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_ulong)
	lock
	andq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_64)

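/*
 * New-value forms of add, and, and or.  Because the updated value must be
 * returned, each routine computes old <op> arg into %rcx and retries a
 * locked cmpxchg until the store succeeds, then returns the new value in
 * %eax/%rax.
 */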
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	addb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	addw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	addl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	ENTRY(atomic_add_64_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	addq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_64_nv)

	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	ENTRY(atomic_and_64_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	andq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_64_nv)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	ENTRY(atomic_or_64_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	orq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_64_nv)

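/*
 * Compare-and-swap.  Roughly, per atomic_ops(3C):
 *	uint32_t atomic_cas_32(volatile uint32_t *target,
 *	    uint32_t cmp, uint32_t newval);
 * The expected value is placed in %eax/%rax as cmpxchg requires, and the
 * value that was actually found in *target is returned to the caller.
 */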
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movzbl	%sil, %eax
	lock
	cmpxchgb %dl, (%rdi)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movzwl	%si, %eax
	lock
	cmpxchgw %dx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	movl	%esi, %eax
	lock
	cmpxchgl %edx, (%rdi)
	ret
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movq	%rsi, %rax
	lock
	cmpxchgq %rdx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_64)

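/*
 * Swap: atomically store the new value and return the previous contents
 * of *target.  Note that xchg with a memory operand is implicitly locked,
 * so the explicit lock prefix here is redundant but harmless.
 */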
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movzbl	%sil, %eax
	lock
	xchgb %al, (%rdi)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movzwl	%si, %eax
	lock
	xchgw %ax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	movl	%esi, %eax
	lock
	xchgl %eax, (%rdi)
	ret
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	ALTENTRY(atomic_swap_ulong)
	ALTENTRY(atomic_swap_ptr)
	movq	%rsi, %rax
	lock
	xchgq %rax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_64)

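/*
 * Exclusive bit set/clear: atomically set (or clear) bit number %rsi in
 * the word at %rdi.  Returns 0 if the bit changed state, or -1 if it was
 * already set (or already clear), as reported by the carry flag from
 * bts/btr.
 */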
	ENTRY(atomic_set_long_excl)
	xorl	%eax, %eax
	lock
	btsq	%rsi, (%rdi)
	jnc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	xorl	%eax, %eax
	lock
	btrq	%rsi, (%rdi)
	jc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

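/*
 * Memory barriers are built here only for the userland (libc) case,
 * presumably because the kernel provides its own implementations.  On
 * amd64, membar_enter and membar_exit are full barriers (mfence),
 * membar_producer orders stores (sfence), and membar_consumer orders
 * loads (lfence).
 */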
#if !defined(_KERNEL)

	ENTRY(membar_enter)
	ALTENTRY(membar_exit)
	mfence
	ret
	SET_SIZE(membar_exit)
	SET_SIZE(membar_enter)

	ENTRY(membar_producer)
	sfence
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lfence
	ret
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */