/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M%	%I%	%E% SMI"

	.file	"%M%"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif

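	/*
	 * The simple (void) atomic routines below map each operation
	 * onto a single lock-prefixed read-modify-write instruction;
	 * the lock prefix is what makes the update atomic with respect
	 * to other CPUs.  The target address arrives in %rdi per the
	 * amd64 calling convention.  In C terms, roughly (a sketch of
	 * the contract, not the actual declaration):
	 *
	 *	void atomic_inc_8(volatile uint8_t *target);
	 */
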
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	lock
	incb	(%rdi)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	lock
	incw	(%rdi)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	lock
	incl	(%rdi)
	ret
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_ulong)
	lock
	incq	(%rdi)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_64)

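	/*
	 * The *_nv ("new value") variants must return the updated value.
	 * No single amd64 instruction both updates memory and yields the
	 * result for these operations, so they loop on cmpxchg: read the
	 * old value, compute the new one, and retry if another CPU
	 * changed the location in between.  A sketch of the loop in C
	 * (types vary with the function; this is illustrative only):
	 *
	 *	uint8_t
	 *	atomic_inc_8_nv(volatile uint8_t *target)
	 *	{
	 *		uint8_t old, new;
	 *		do {
	 *			old = *target;
	 *			new = old + 1;
	 *		} while (atomic_cas_8(target, old, new) != old);
	 *		return (new);
	 *	}
	 */
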
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	leaq	1(%rax), %rcx	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	leaq	1(%rax), %rcx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	leaq	1(%rax), %rcx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

	ENTRY(atomic_inc_64_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	leaq	1(%rax), %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_64_nv)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	lock
	decb	(%rdi)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	lock
	decw	(%rdi)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	lock
	decl	(%rdi)
	ret
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_ulong)
	lock
	decq	(%rdi)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	leaq	-1(%rax), %rcx	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	leaq	-1(%rax), %rcx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	leaq	-1(%rax), %rcx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	ENTRY(atomic_dec_64_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	leaq	-1(%rax), %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_64_nv)

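	/*
	 * The two-operand routines below take the target in %rdi and
	 * the delta (or bit mask) in %rsi; %sil, %si and %esi are the
	 * 8-, 16- and 32-bit views of that same argument register.
	 */
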
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	lock
	addb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	lock
	addw	%si, (%rdi)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	lock
	addl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	lock
	addq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_64)

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	lock
	orb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	lock
	orw	%si, (%rdi)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	lock
	orl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_ulong)
	lock
	orq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	lock
	andb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	lock
	andw	%si, (%rdi)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	lock
	andl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_ulong)
	lock
	andq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	addb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	addw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	addl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	ENTRY(atomic_add_64_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	addq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_64_nv)

	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	ENTRY(atomic_and_64_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	andq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_64_nv)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	ENTRY(atomic_or_64_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	orq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_64_nv)

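	/*
	 * atomic_cas_*(target, cmp, new): compare-and-swap.  cmpxchg
	 * compares %rax (loaded here from the second argument) with
	 * *target; if they are equal it stores the third argument, and
	 * either way it leaves the old value of *target in %rax, which
	 * is exactly the value the interface must return.
	 */
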
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movzbl	%sil, %eax
	lock
	cmpxchgb %dl, (%rdi)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movzwl	%si, %eax
	lock
	cmpxchgw %dx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	movl	%esi, %eax
	lock
	cmpxchgl %edx, (%rdi)
	ret
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movq	%rsi, %rax
	lock
	cmpxchgq %rdx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_64)

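	/*
	 * atomic_swap_*(target, new) returns the old value.  Note that
	 * xchg with a memory operand is implicitly locked, so the
	 * explicit lock prefix below is redundant (but harmless).
	 */
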
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movzbl	%sil, %eax
	lock
	xchgb %al, (%rdi)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movzwl	%si, %eax
	lock
	xchgw %ax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	movl	%esi, %eax
	lock
	xchgl %eax, (%rdi)
	ret
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	ALTENTRY(atomic_swap_ulong)
	ALTENTRY(atomic_swap_ptr)
	movq	%rsi, %rax
	lock
	xchgq %rax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_64)

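	/*
	 * atomic_set_long_excl(target, bit) atomically sets the given
	 * bit and returns 0 only if the bit was previously clear; if it
	 * was already set, -1 is returned.  bts/btr copy the bit's old
	 * value into the carry flag, which the jnc/jc below test.
	 * atomic_clear_long_excl() is the symmetric clear operation.
	 */
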
	ENTRY(atomic_set_long_excl)
	xorl	%eax, %eax
	lock
	btsq	%rsi, (%rdi)
	jnc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	xorl	%eax, %eax
	lock
	btrq	%rsi, (%rdi)
	jc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * NOTE: membar_enter and membar_exit are identical routines.
	 * We define them separately, instead of using ALTENTRY
	 * definitions to alias them together, so that DTrace and
	 * debuggers will see a unique address for each, allowing
	 * more accurate tracing.
	 */

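	/*
	 * mfence is a full barrier (orders both loads and stores);
	 * sfence orders only stores, matching membar_producer(), and
	 * lfence orders only loads, matching membar_consumer().
	 */
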
	ENTRY(membar_enter)
	mfence
	ret
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	mfence
	ret
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	sfence
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lfence
	ret
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */