xref: /titanic_41/usr/src/common/atomic/amd64/atomic.s (revision fcf3ce441efd61da9bb2884968af01cb7c1452cc)
1/*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22/*
23 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
24 * Use is subject to license terms.
25 */
26
27	.file	"atomic.s"
28
29#include <sys/asm_linkage.h>
30
31#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 *
	 * NOTE(review): ANSI_PRAGMA_WEAK2(old, new, type) presumably emits a
	 * weak-symbol alias so the legacy name (e.g. cas32) resolves to the
	 * corresponding atomic_* routine below -- confirm in asm_linkage.h.
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
42#endif
43
/ void atomic_inc_8(volatile uint8_t *target)
/ in: %rdi = target.  Atomically increments the byte at *target.
44	ENTRY(atomic_inc_8)
45	ALTENTRY(atomic_inc_uchar)
46	lock
47	incb	(%rdi)
48	ret
49	SET_SIZE(atomic_inc_uchar)
50	SET_SIZE(atomic_inc_8)
51
/ void atomic_inc_16(volatile uint16_t *target)
/ in: %rdi = target.  Atomically increments the 16-bit word at *target.
52	ENTRY(atomic_inc_16)
53	ALTENTRY(atomic_inc_ushort)
54	lock
55	incw	(%rdi)
56	ret
57	SET_SIZE(atomic_inc_ushort)
58	SET_SIZE(atomic_inc_16)
59
/ void atomic_inc_32(volatile uint32_t *target)
/ in: %rdi = target.  Atomically increments the 32-bit word at *target.
60	ENTRY(atomic_inc_32)
61	ALTENTRY(atomic_inc_uint)
62	lock
63	incl	(%rdi)
64	ret
65	SET_SIZE(atomic_inc_uint)
66	SET_SIZE(atomic_inc_32)
67
/ void atomic_inc_64(volatile uint64_t *target)
/ in: %rdi = target.  Atomically increments the 64-bit word at *target.
68	ENTRY(atomic_inc_64)
69	ALTENTRY(atomic_inc_ulong)
70	lock
71	incq	(%rdi)
72	ret
73	SET_SIZE(atomic_inc_ulong)
74	SET_SIZE(atomic_inc_64)
75
/ uint8_t atomic_inc_8_nv(volatile uint8_t *target)
/ in: %rdi = target.  out: %al = new (incremented) value.
/ CAS loop: on failure, cmpxchgb loads the current *target into %al,
/ so the retry recomputes from the freshly observed value.
76	ENTRY(atomic_inc_8_nv)
77	ALTENTRY(atomic_inc_uchar_nv)
78	movb	(%rdi), %al	/ %al = old value
791:
80	leaq	1(%rax), %rcx	/ %cl = new value (upper bits of %rcx unused)
81	lock
82	cmpxchgb %cl, (%rdi)	/ try to stick it in
83	jne	1b
84	movzbl	%cl, %eax	/ return new value
85	ret
86	SET_SIZE(atomic_inc_uchar_nv)
87	SET_SIZE(atomic_inc_8_nv)
88
/ uint16_t atomic_inc_16_nv(volatile uint16_t *target)
/ in: %rdi = target.  out: %ax = new (incremented) value.
/ CAS loop: on failure, cmpxchgw loads the current *target into %ax.
89	ENTRY(atomic_inc_16_nv)
90	ALTENTRY(atomic_inc_ushort_nv)
91	movw	(%rdi), %ax	/ %ax = old value
921:
93	leaq	1(%rax), %rcx	/ %cx = new value (upper bits of %rcx unused)
94	lock
95	cmpxchgw %cx, (%rdi)	/ try to stick it in
96	jne	1b
97	movzwl	%cx, %eax	/ return new value
98	ret
99	SET_SIZE(atomic_inc_ushort_nv)
100	SET_SIZE(atomic_inc_16_nv)
101
/ uint32_t atomic_inc_32_nv(volatile uint32_t *target)
/ in: %rdi = target.  out: %eax = new (incremented) value.
/ CAS loop: on failure, cmpxchgl loads the current *target into %eax.
102	ENTRY(atomic_inc_32_nv)
103	ALTENTRY(atomic_inc_uint_nv)
104	movl	(%rdi), %eax	/ %eax = old value
1051:
106	leaq	1(%rax), %rcx	/ %ecx = new value
107	lock
108	cmpxchgl %ecx, (%rdi)	/ try to stick it in
109	jne	1b
110	movl	%ecx, %eax	/ return new value
111	ret
112	SET_SIZE(atomic_inc_uint_nv)
113	SET_SIZE(atomic_inc_32_nv)
114
/ uint64_t atomic_inc_64_nv(volatile uint64_t *target)
/ in: %rdi = target.  out: %rax = new (incremented) value.
/ CAS loop: on failure, cmpxchgq loads the current *target into %rax.
115	ENTRY(atomic_inc_64_nv)
116	ALTENTRY(atomic_inc_ulong_nv)
117	movq	(%rdi), %rax	/ %rax = old value
1181:
119	leaq	1(%rax), %rcx	/ %rcx = new value
120	lock
121	cmpxchgq %rcx, (%rdi)	/ try to stick it in
122	jne	1b
123	movq	%rcx, %rax	/ return new value
124	ret
125	SET_SIZE(atomic_inc_ulong_nv)
126	SET_SIZE(atomic_inc_64_nv)
127
/ void atomic_dec_8(volatile uint8_t *target)
/ in: %rdi = target.  Atomically decrements the byte at *target.
128	ENTRY(atomic_dec_8)
129	ALTENTRY(atomic_dec_uchar)
130	lock
131	decb	(%rdi)
132	ret
133	SET_SIZE(atomic_dec_uchar)
134	SET_SIZE(atomic_dec_8)
135
/ void atomic_dec_16(volatile uint16_t *target)
/ in: %rdi = target.  Atomically decrements the 16-bit word at *target.
136	ENTRY(atomic_dec_16)
137	ALTENTRY(atomic_dec_ushort)
138	lock
139	decw	(%rdi)
140	ret
141	SET_SIZE(atomic_dec_ushort)
142	SET_SIZE(atomic_dec_16)
143
/ void atomic_dec_32(volatile uint32_t *target)
/ in: %rdi = target.  Atomically decrements the 32-bit word at *target.
144	ENTRY(atomic_dec_32)
145	ALTENTRY(atomic_dec_uint)
146	lock
147	decl	(%rdi)
148	ret
149	SET_SIZE(atomic_dec_uint)
150	SET_SIZE(atomic_dec_32)
151
/ void atomic_dec_64(volatile uint64_t *target)
/ in: %rdi = target.  Atomically decrements the 64-bit word at *target.
152	ENTRY(atomic_dec_64)
153	ALTENTRY(atomic_dec_ulong)
154	lock
155	decq	(%rdi)
156	ret
157	SET_SIZE(atomic_dec_ulong)
158	SET_SIZE(atomic_dec_64)
159
/ uint8_t atomic_dec_8_nv(volatile uint8_t *target)
/ in: %rdi = target.  out: %al = new (decremented) value.
/ CAS loop: on failure, cmpxchgb loads the current *target into %al.
160	ENTRY(atomic_dec_8_nv)
161	ALTENTRY(atomic_dec_uchar_nv)
162	movb	(%rdi), %al	/ %al = old value
1631:
164	leaq	-1(%rax), %rcx	/ %cl = new value (upper bits of %rcx unused)
165	lock
166	cmpxchgb %cl, (%rdi)	/ try to stick it in
167	jne	1b
168	movzbl	%cl, %eax	/ return new value
169	ret
170	SET_SIZE(atomic_dec_uchar_nv)
171	SET_SIZE(atomic_dec_8_nv)
172
/ uint16_t atomic_dec_16_nv(volatile uint16_t *target)
/ in: %rdi = target.  out: %ax = new (decremented) value.
/ CAS loop: on failure, cmpxchgw loads the current *target into %ax.
173	ENTRY(atomic_dec_16_nv)
174	ALTENTRY(atomic_dec_ushort_nv)
175	movw	(%rdi), %ax	/ %ax = old value
1761:
177	leaq	-1(%rax), %rcx	/ %cx = new value (upper bits of %rcx unused)
178	lock
179	cmpxchgw %cx, (%rdi)	/ try to stick it in
180	jne	1b
181	movzwl	%cx, %eax	/ return new value
182	ret
183	SET_SIZE(atomic_dec_ushort_nv)
184	SET_SIZE(atomic_dec_16_nv)
185
/ uint32_t atomic_dec_32_nv(volatile uint32_t *target)
/ in: %rdi = target.  out: %eax = new (decremented) value.
/ CAS loop: on failure, cmpxchgl loads the current *target into %eax.
186	ENTRY(atomic_dec_32_nv)
187	ALTENTRY(atomic_dec_uint_nv)
188	movl	(%rdi), %eax	/ %eax = old value
1891:
190	leaq	-1(%rax), %rcx	/ %ecx = new value
191	lock
192	cmpxchgl %ecx, (%rdi)	/ try to stick it in
193	jne	1b
194	movl	%ecx, %eax	/ return new value
195	ret
196	SET_SIZE(atomic_dec_uint_nv)
197	SET_SIZE(atomic_dec_32_nv)
198
/ uint64_t atomic_dec_64_nv(volatile uint64_t *target)
/ in: %rdi = target.  out: %rax = new (decremented) value.
/ CAS loop: on failure, cmpxchgq loads the current *target into %rax.
199	ENTRY(atomic_dec_64_nv)
200	ALTENTRY(atomic_dec_ulong_nv)
201	movq	(%rdi), %rax	/ %rax = old value
2021:
203	leaq	-1(%rax), %rcx	/ %rcx = new value
204	lock
205	cmpxchgq %rcx, (%rdi)	/ try to stick it in
206	jne	1b
207	movq	%rcx, %rax	/ return new value
208	ret
209	SET_SIZE(atomic_dec_ulong_nv)
210	SET_SIZE(atomic_dec_64_nv)
211
/ void atomic_add_8(volatile uint8_t *target, int8_t delta)
/ in: %rdi = target, %sil = delta.  Atomically *target += delta.
212	ENTRY(atomic_add_8)
213	ALTENTRY(atomic_add_char)
214	lock
215	addb	%sil, (%rdi)
216	ret
217	SET_SIZE(atomic_add_char)
218	SET_SIZE(atomic_add_8)
219
/ void atomic_add_16(volatile uint16_t *target, int16_t delta)
/ in: %rdi = target, %si = delta.  Atomically *target += delta.
220	ENTRY(atomic_add_16)
221	ALTENTRY(atomic_add_short)
222	lock
223	addw	%si, (%rdi)
224	ret
225	SET_SIZE(atomic_add_short)
226	SET_SIZE(atomic_add_16)
227
/ void atomic_add_32(volatile uint32_t *target, int32_t delta)
/ in: %rdi = target, %esi = delta.  Atomically *target += delta.
228	ENTRY(atomic_add_32)
229	ALTENTRY(atomic_add_int)
230	lock
231	addl	%esi, (%rdi)
232	ret
233	SET_SIZE(atomic_add_int)
234	SET_SIZE(atomic_add_32)
235
/ void atomic_add_64(volatile uint64_t *target, int64_t delta)
/ in: %rdi = target, %rsi = delta.  Atomically *target += delta.
/ Also serves the pointer/long variants, which are 64-bit on amd64.
236	ENTRY(atomic_add_64)
237	ALTENTRY(atomic_add_ptr)
238	ALTENTRY(atomic_add_long)
239	lock
240	addq	%rsi, (%rdi)
241	ret
242	SET_SIZE(atomic_add_long)
243	SET_SIZE(atomic_add_ptr)
244	SET_SIZE(atomic_add_64)
245
/ void atomic_or_8(volatile uint8_t *target, uint8_t bits)
/ in: %rdi = target, %sil = bits.  Atomically *target |= bits.
246	ENTRY(atomic_or_8)
247	ALTENTRY(atomic_or_uchar)
248	lock
249	orb	%sil, (%rdi)
250	ret
251	SET_SIZE(atomic_or_uchar)
252	SET_SIZE(atomic_or_8)
253
/ void atomic_or_16(volatile uint16_t *target, uint16_t bits)
/ in: %rdi = target, %si = bits.  Atomically *target |= bits.
254	ENTRY(atomic_or_16)
255	ALTENTRY(atomic_or_ushort)
256	lock
257	orw	%si, (%rdi)
258	ret
259	SET_SIZE(atomic_or_ushort)
260	SET_SIZE(atomic_or_16)
261
/ void atomic_or_32(volatile uint32_t *target, uint32_t bits)
/ in: %rdi = target, %esi = bits.  Atomically *target |= bits.
262	ENTRY(atomic_or_32)
263	ALTENTRY(atomic_or_uint)
264	lock
265	orl	%esi, (%rdi)
266	ret
267	SET_SIZE(atomic_or_uint)
268	SET_SIZE(atomic_or_32)
269
/ void atomic_or_64(volatile uint64_t *target, uint64_t bits)
/ in: %rdi = target, %rsi = bits.  Atomically *target |= bits.
270	ENTRY(atomic_or_64)
271	ALTENTRY(atomic_or_ulong)
272	lock
273	orq	%rsi, (%rdi)
274	ret
275	SET_SIZE(atomic_or_ulong)
276	SET_SIZE(atomic_or_64)
277
/ void atomic_and_8(volatile uint8_t *target, uint8_t bits)
/ in: %rdi = target, %sil = bits.  Atomically *target &= bits.
278	ENTRY(atomic_and_8)
279	ALTENTRY(atomic_and_uchar)
280	lock
281	andb	%sil, (%rdi)
282	ret
283	SET_SIZE(atomic_and_uchar)
284	SET_SIZE(atomic_and_8)
285
/ void atomic_and_16(volatile uint16_t *target, uint16_t bits)
/ in: %rdi = target, %si = bits.  Atomically *target &= bits.
286	ENTRY(atomic_and_16)
287	ALTENTRY(atomic_and_ushort)
288	lock
289	andw	%si, (%rdi)
290	ret
291	SET_SIZE(atomic_and_ushort)
292	SET_SIZE(atomic_and_16)
293
/ void atomic_and_32(volatile uint32_t *target, uint32_t bits)
/ in: %rdi = target, %esi = bits.  Atomically *target &= bits.
294	ENTRY(atomic_and_32)
295	ALTENTRY(atomic_and_uint)
296	lock
297	andl	%esi, (%rdi)
298	ret
299	SET_SIZE(atomic_and_uint)
300	SET_SIZE(atomic_and_32)
301
/ void atomic_and_64(volatile uint64_t *target, uint64_t bits)
/ in: %rdi = target, %rsi = bits.  Atomically *target &= bits.
302	ENTRY(atomic_and_64)
303	ALTENTRY(atomic_and_ulong)
304	lock
305	andq	%rsi, (%rdi)
306	ret
307	SET_SIZE(atomic_and_ulong)
308	SET_SIZE(atomic_and_64)
309
/ uint8_t atomic_add_8_nv(volatile uint8_t *target, int8_t delta)
/ in: %rdi = target, %sil = delta.  out: %al = new value (*target + delta).
/ CAS loop: on failure, cmpxchgb loads the current *target into %al.
310	ENTRY(atomic_add_8_nv)
311	ALTENTRY(atomic_add_char_nv)
312	movb	(%rdi), %al	/ %al = old value
3131:
314	movb	%sil, %cl
315	addb	%al, %cl	/ %cl = new value
316	lock
317	cmpxchgb %cl, (%rdi)	/ try to stick it in
318	jne	1b
319	movzbl	%cl, %eax	/ return new value
320	ret
321	SET_SIZE(atomic_add_char_nv)
322	SET_SIZE(atomic_add_8_nv)
323
/ uint16_t atomic_add_16_nv(volatile uint16_t *target, int16_t delta)
/ in: %rdi = target, %si = delta.  out: %ax = new value (*target + delta).
/ CAS loop: on failure, cmpxchgw loads the current *target into %ax.
324	ENTRY(atomic_add_16_nv)
325	ALTENTRY(atomic_add_short_nv)
326	movw	(%rdi), %ax	/ %ax = old value
3271:
328	movw	%si, %cx
329	addw	%ax, %cx	/ %cx = new value
330	lock
331	cmpxchgw %cx, (%rdi)	/ try to stick it in
332	jne	1b
333	movzwl	%cx, %eax	/ return new value
334	ret
335	SET_SIZE(atomic_add_short_nv)
336	SET_SIZE(atomic_add_16_nv)
337
/ uint32_t atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
/ in: %rdi = target, %esi = delta.  out: %eax = new value (*target + delta).
/ CAS loop: on failure, cmpxchgl loads the current *target into %eax.
338	ENTRY(atomic_add_32_nv)
339	ALTENTRY(atomic_add_int_nv)
340	movl	(%rdi), %eax	/ %eax = old value
3411:
342	movl	%esi, %ecx
343	addl	%eax, %ecx	/ %ecx = new value
344	lock
345	cmpxchgl %ecx, (%rdi)	/ try to stick it in
346	jne	1b
347	movl	%ecx, %eax	/ return new value
348	ret
349	SET_SIZE(atomic_add_int_nv)
350	SET_SIZE(atomic_add_32_nv)
351
/ uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
/ in: %rdi = target, %rsi = delta.  out: %rax = new value (*target + delta).
/ Also serves the pointer/long variants, which are 64-bit on amd64.
/ CAS loop: on failure, cmpxchgq loads the current *target into %rax.
352	ENTRY(atomic_add_64_nv)
353	ALTENTRY(atomic_add_ptr_nv)
354	ALTENTRY(atomic_add_long_nv)
355	movq	(%rdi), %rax	/ %rax = old value
3561:
357	movq	%rsi, %rcx
358	addq	%rax, %rcx	/ %rcx = new value
359	lock
360	cmpxchgq %rcx, (%rdi)	/ try to stick it in
361	jne	1b
362	movq	%rcx, %rax	/ return new value
363	ret
364	SET_SIZE(atomic_add_long_nv)
365	SET_SIZE(atomic_add_ptr_nv)
366	SET_SIZE(atomic_add_64_nv)
367
/ uint8_t atomic_and_8_nv(volatile uint8_t *target, uint8_t bits)
/ in: %rdi = target, %sil = bits.  out: %al = new value (*target & bits).
/ CAS loop: on failure, cmpxchgb loads the current *target into %al.
368	ENTRY(atomic_and_8_nv)
369	ALTENTRY(atomic_and_uchar_nv)
370	movb	(%rdi), %al	/ %al = old value
3711:
372	movb	%sil, %cl
373	andb	%al, %cl	/ %cl = new value
374	lock
375	cmpxchgb %cl, (%rdi)	/ try to stick it in
376	jne	1b
377	movzbl	%cl, %eax	/ return new value
378	ret
379	SET_SIZE(atomic_and_uchar_nv)
380	SET_SIZE(atomic_and_8_nv)
381
/ uint16_t atomic_and_16_nv(volatile uint16_t *target, uint16_t bits)
/ in: %rdi = target, %si = bits.  out: %ax = new value (*target & bits).
/ CAS loop: on failure, cmpxchgw loads the current *target into %ax.
382	ENTRY(atomic_and_16_nv)
383	ALTENTRY(atomic_and_ushort_nv)
384	movw	(%rdi), %ax	/ %ax = old value
3851:
386	movw	%si, %cx
387	andw	%ax, %cx	/ %cx = new value
388	lock
389	cmpxchgw %cx, (%rdi)	/ try to stick it in
390	jne	1b
391	movzwl	%cx, %eax	/ return new value
392	ret
393	SET_SIZE(atomic_and_ushort_nv)
394	SET_SIZE(atomic_and_16_nv)
395
/ uint32_t atomic_and_32_nv(volatile uint32_t *target, uint32_t bits)
/ in: %rdi = target, %esi = bits.  out: %eax = new value (*target & bits).
/ CAS loop: on failure, cmpxchgl loads the current *target into %eax.
396	ENTRY(atomic_and_32_nv)
397	ALTENTRY(atomic_and_uint_nv)
398	movl	(%rdi), %eax	/ %eax = old value
3991:
400	movl	%esi, %ecx
401	andl	%eax, %ecx	/ %ecx = new value
402	lock
403	cmpxchgl %ecx, (%rdi)	/ try to stick it in
404	jne	1b
405	movl	%ecx, %eax	/ return new value
406	ret
407	SET_SIZE(atomic_and_uint_nv)
408	SET_SIZE(atomic_and_32_nv)
409
/ uint64_t atomic_and_64_nv(volatile uint64_t *target, uint64_t bits)
/ in: %rdi = target, %rsi = bits.  out: %rax = new value (*target & bits).
/ CAS loop: on failure, cmpxchgq loads the current *target into %rax.
410	ENTRY(atomic_and_64_nv)
411	ALTENTRY(atomic_and_ulong_nv)
412	movq	(%rdi), %rax	/ %rax = old value
4131:
414	movq	%rsi, %rcx
415	andq	%rax, %rcx	/ %rcx = new value
416	lock
417	cmpxchgq %rcx, (%rdi)	/ try to stick it in
418	jne	1b
419	movq	%rcx, %rax	/ return new value
420	ret
421	SET_SIZE(atomic_and_ulong_nv)
422	SET_SIZE(atomic_and_64_nv)
423
/ uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t bits)
/ in: %rdi = target, %sil = bits.  out: %al = new value (*target | bits).
/ CAS loop: on failure, cmpxchgb loads the current *target into %al.
/
/ FIX: the trailing SET_SIZE directives previously named the atomic_and_*
/ symbols (copy-paste from the preceding block), leaving atomic_or_8_nv /
/ atomic_or_uchar_nv with no ELF symbol size and applying .size to the
/ and-variants twice.  They now name this routine's own entry points.
424	ENTRY(atomic_or_8_nv)
425	ALTENTRY(atomic_or_uchar_nv)
426	movb	(%rdi), %al	/ %al = old value
4271:
428	movb	%sil, %cl
429	orb	%al, %cl	/ %cl = new value
430	lock
431	cmpxchgb %cl, (%rdi)	/ try to stick it in
432	jne	1b
433	movzbl	%cl, %eax	/ return new value
434	ret
435	SET_SIZE(atomic_or_uchar_nv)
436	SET_SIZE(atomic_or_8_nv)
437
/ uint16_t atomic_or_16_nv(volatile uint16_t *target, uint16_t bits)
/ in: %rdi = target, %si = bits.  out: %ax = new value (*target | bits).
/ CAS loop: on failure, cmpxchgw loads the current *target into %ax.
438	ENTRY(atomic_or_16_nv)
439	ALTENTRY(atomic_or_ushort_nv)
440	movw	(%rdi), %ax	/ %ax = old value
4411:
442	movw	%si, %cx
443	orw	%ax, %cx	/ %cx = new value
444	lock
445	cmpxchgw %cx, (%rdi)	/ try to stick it in
446	jne	1b
447	movzwl	%cx, %eax	/ return new value
448	ret
449	SET_SIZE(atomic_or_ushort_nv)
450	SET_SIZE(atomic_or_16_nv)
451
/ uint32_t atomic_or_32_nv(volatile uint32_t *target, uint32_t bits)
/ in: %rdi = target, %esi = bits.  out: %eax = new value (*target | bits).
/ CAS loop: on failure, cmpxchgl loads the current *target into %eax.
452	ENTRY(atomic_or_32_nv)
453	ALTENTRY(atomic_or_uint_nv)
454	movl	(%rdi), %eax	/ %eax = old value
4551:
456	movl	%esi, %ecx
457	orl	%eax, %ecx	/ %ecx = new value
458	lock
459	cmpxchgl %ecx, (%rdi)	/ try to stick it in
460	jne	1b
461	movl	%ecx, %eax	/ return new value
462	ret
463	SET_SIZE(atomic_or_uint_nv)
464	SET_SIZE(atomic_or_32_nv)
465
/ uint64_t atomic_or_64_nv(volatile uint64_t *target, uint64_t bits)
/ in: %rdi = target, %rsi = bits.  out: %rax = new value (*target | bits).
/ CAS loop: on failure, cmpxchgq loads the current *target into %rax.
466	ENTRY(atomic_or_64_nv)
467	ALTENTRY(atomic_or_ulong_nv)
468	movq	(%rdi), %rax	/ %rax = old value
4691:
470	movq	%rsi, %rcx
471	orq	%rax, %rcx	/ %rcx = new value
472	lock
473	cmpxchgq %rcx, (%rdi)	/ try to stick it in
474	jne	1b
475	movq	%rcx, %rax	/ return new value
476	ret
477	SET_SIZE(atomic_or_ulong_nv)
478	SET_SIZE(atomic_or_64_nv)
479
/ uint8_t atomic_cas_8(volatile uint8_t *target, uint8_t cmp, uint8_t new)
/ in: %rdi = target, %sil = cmp, %dl = new.  out: %al = value witnessed at
/ *target.  Stores new only if *target == cmp; caller compares the return
/ value against cmp to detect success.
480	ENTRY(atomic_cas_8)
481	ALTENTRY(atomic_cas_uchar)
482	movzbl	%sil, %eax
483	lock
484	cmpxchgb %dl, (%rdi)
485	ret
486	SET_SIZE(atomic_cas_uchar)
487	SET_SIZE(atomic_cas_8)
488
/ uint16_t atomic_cas_16(volatile uint16_t *target, uint16_t cmp, uint16_t new)
/ in: %rdi = target, %si = cmp, %dx = new.  out: %ax = value witnessed at
/ *target.  Stores new only if *target == cmp.
489	ENTRY(atomic_cas_16)
490	ALTENTRY(atomic_cas_ushort)
491	movzwl	%si, %eax
492	lock
493	cmpxchgw %dx, (%rdi)
494	ret
495	SET_SIZE(atomic_cas_ushort)
496	SET_SIZE(atomic_cas_16)
497
/ uint32_t atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t new)
/ in: %rdi = target, %esi = cmp, %edx = new.  out: %eax = value witnessed at
/ *target.  Stores new only if *target == cmp.
498	ENTRY(atomic_cas_32)
499	ALTENTRY(atomic_cas_uint)
500	movl	%esi, %eax
501	lock
502	cmpxchgl %edx, (%rdi)
503	ret
504	SET_SIZE(atomic_cas_uint)
505	SET_SIZE(atomic_cas_32)
506
/ uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t new)
/ in: %rdi = target, %rsi = cmp, %rdx = new.  out: %rax = value witnessed at
/ *target.  Stores new only if *target == cmp.
/ Also serves the pointer/long variants, which are 64-bit on amd64.
507	ENTRY(atomic_cas_64)
508	ALTENTRY(atomic_cas_ulong)
509	ALTENTRY(atomic_cas_ptr)
510	movq	%rsi, %rax
511	lock
512	cmpxchgq %rdx, (%rdi)
513	ret
514	SET_SIZE(atomic_cas_ptr)
515	SET_SIZE(atomic_cas_ulong)
516	SET_SIZE(atomic_cas_64)
517
/ uint8_t atomic_swap_8(volatile uint8_t *target, uint8_t new)
/ in: %rdi = target, %sil = new.  out: %al = previous value of *target.
/ Note: xchg with a memory operand asserts bus lock implicitly; the
/ explicit lock prefix is redundant but harmless.
518	ENTRY(atomic_swap_8)
519	ALTENTRY(atomic_swap_uchar)
520	movzbl	%sil, %eax
521	lock
522	xchgb %al, (%rdi)
523	ret
524	SET_SIZE(atomic_swap_uchar)
525	SET_SIZE(atomic_swap_8)
526
/ uint16_t atomic_swap_16(volatile uint16_t *target, uint16_t new)
/ in: %rdi = target, %si = new.  out: %ax = previous value of *target.
/ Note: xchg with memory is implicitly locked; the prefix is redundant.
527	ENTRY(atomic_swap_16)
528	ALTENTRY(atomic_swap_ushort)
529	movzwl	%si, %eax
530	lock
531	xchgw %ax, (%rdi)
532	ret
533	SET_SIZE(atomic_swap_ushort)
534	SET_SIZE(atomic_swap_16)
535
/ uint32_t atomic_swap_32(volatile uint32_t *target, uint32_t new)
/ in: %rdi = target, %esi = new.  out: %eax = previous value of *target.
/ Note: xchg with memory is implicitly locked; the prefix is redundant.
536	ENTRY(atomic_swap_32)
537	ALTENTRY(atomic_swap_uint)
538	movl	%esi, %eax
539	lock
540	xchgl %eax, (%rdi)
541	ret
542	SET_SIZE(atomic_swap_uint)
543	SET_SIZE(atomic_swap_32)
544
/ uint64_t atomic_swap_64(volatile uint64_t *target, uint64_t new)
/ in: %rdi = target, %rsi = new.  out: %rax = previous value of *target.
/ Also serves the pointer/long variants, which are 64-bit on amd64.
/ Note: xchg with memory is implicitly locked; the prefix is redundant.
545	ENTRY(atomic_swap_64)
546	ALTENTRY(atomic_swap_ulong)
547	ALTENTRY(atomic_swap_ptr)
548	movq	%rsi, %rax
549	lock
550	xchgq %rax, (%rdi)
551	ret
552	SET_SIZE(atomic_swap_ptr)
553	SET_SIZE(atomic_swap_ulong)
554	SET_SIZE(atomic_swap_64)
555
/ int atomic_set_long_excl(volatile ulong_t *target, uint_t value)
/ in: %rdi = target, %rsi = bit index.  out: %eax = 0 if this caller set
/ the bit, -1 if the bit was already set (bts sets CF to the old bit).
556	ENTRY(atomic_set_long_excl)
557	xorl	%eax, %eax
558	lock
559	btsq	%rsi, (%rdi)
560	jnc	1f
561	decl	%eax			/ return -1
5621:
563	ret
564	SET_SIZE(atomic_set_long_excl)
565
/ int atomic_clear_long_excl(volatile ulong_t *target, uint_t value)
/ in: %rdi = target, %rsi = bit index.  out: %eax = 0 if this caller
/ cleared the bit, -1 if it was already clear (btr sets CF to the old bit).
566	ENTRY(atomic_clear_long_excl)
567	xorl	%eax, %eax
568	lock
569	btrq	%rsi, (%rdi)
570	jc	1f
571	decl	%eax			/ return -1
5721:
573	ret
574	SET_SIZE(atomic_clear_long_excl)
575
576#if !defined(_KERNEL)
577
578	/*
579	 * NOTE: membar_enter and membar_exit are identical routines.
580	 * We define them separately, instead of using ALTENTRY
581	 * definitions to alias them together, so that DTrace and
582	 * debuggers will see a unique address for each, allowing
583	 * more accurate tracing.
584	 */
585
/ void membar_enter(void)
/ Full barrier: mfence orders all prior loads and stores before any later ones.
586	ENTRY(membar_enter)
587	mfence
588	ret
589	SET_SIZE(membar_enter)
590
/ void membar_exit(void)
/ Full barrier: identical to membar_enter, kept separate for tracing (see above).
591	ENTRY(membar_exit)
592	mfence
593	ret
594	SET_SIZE(membar_exit)
595
/ void membar_producer(void)
/ Store barrier: sfence orders all prior stores before any later stores.
596	ENTRY(membar_producer)
597	sfence
598	ret
599	SET_SIZE(membar_producer)
600
/ void membar_consumer(void)
/ Load barrier: lfence orders all prior loads before any later loads.
601	ENTRY(membar_consumer)
602	lfence
603	ret
604	SET_SIZE(membar_consumer)
605
606#endif	/* !_KERNEL */
607