xref: /titanic_41/usr/src/common/atomic/amd64/atomic.s (revision ae5b046d8f8cec187d40041c4b74b43f561d5ac7)
1/*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22/*
23 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
24 * Use is subject to license terms.
25 */
26
27#pragma ident	"%Z%%M%	%I%	%E% SMI"
28
29	.file	"%M%"
30
31#include <sys/asm_linkage.h>
32
33#if defined(_KERNEL)
34	/*
35	 * Legacy kernel interfaces; they will go away (eventually).
	 * Each weak symbol maps an old kernel name (cas8, caslong, ...)
	 * onto its atomic_* replacement defined below.
36	 */
37	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
38	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
39	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
40	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
41	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
42	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
43	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
44#endif
45
46	ENTRY(atomic_inc_8)
47	ALTENTRY(atomic_inc_uchar)
	/ atomic_inc_8(uint8_t *target): atomically increment *target (%rdi)
48	lock
49	incb	(%rdi)
50	ret
51	SET_SIZE(atomic_inc_uchar)
52	SET_SIZE(atomic_inc_8)
53
54	ENTRY(atomic_inc_16)
55	ALTENTRY(atomic_inc_ushort)
	/ atomic_inc_16(uint16_t *target): atomically increment *target
56	lock
57	incw	(%rdi)
58	ret
59	SET_SIZE(atomic_inc_ushort)
60	SET_SIZE(atomic_inc_16)
61
62	ENTRY(atomic_inc_32)
63	ALTENTRY(atomic_inc_uint)
	/ atomic_inc_32(uint32_t *target): atomically increment *target
64	lock
65	incl	(%rdi)
66	ret
67	SET_SIZE(atomic_inc_uint)
68	SET_SIZE(atomic_inc_32)
69
70	ENTRY(atomic_inc_64)
71	ALTENTRY(atomic_inc_ulong)
	/ atomic_inc_64(uint64_t *target): atomically increment *target
72	lock
73	incq	(%rdi)
74	ret
75	SET_SIZE(atomic_inc_ulong)
76	SET_SIZE(atomic_inc_64)
77
78	ENTRY(atomic_inc_8_nv)
79	ALTENTRY(atomic_inc_uchar_nv)
	/ Atomically increment *target and return the new 8-bit value.
	/ cmpxchg loop: on failure, cmpxchgb reloads %al with the current
	/ memory value, so the loop retries with a fresh "old" value.
80	movb	(%rdi), %al	/ %al = old value
811:
	/ upper bits of %rax may be stale, but only %cl is stored below
82	leaq	1(%rax), %rcx	/ %cl = new value
83	lock
84	cmpxchgb %cl, (%rdi)	/ try to stick it in
85	jne	1b
86	movzbl	%cl, %eax	/ return new value
87	ret
88	SET_SIZE(atomic_inc_uchar_nv)
89	SET_SIZE(atomic_inc_8_nv)
90
91	ENTRY(atomic_inc_16_nv)
92	ALTENTRY(atomic_inc_ushort_nv)
	/ Atomically increment *target and return the new 16-bit value.
93	movw	(%rdi), %ax	/ %ax = old value
941:
95	leaq	1(%rax), %rcx	/ %cx = new value
96	lock
97	cmpxchgw %cx, (%rdi)	/ try to stick it in
98	jne	1b
99	movzwl	%cx, %eax	/ return new value
100	ret
101	SET_SIZE(atomic_inc_ushort_nv)
102	SET_SIZE(atomic_inc_16_nv)
103
104	ENTRY(atomic_inc_32_nv)
105	ALTENTRY(atomic_inc_uint_nv)
	/ Atomically increment *target and return the new 32-bit value.
106	movl	(%rdi), %eax	/ %eax = old value
1071:
108	leaq	1(%rax), %rcx	/ %ecx = new value
109	lock
110	cmpxchgl %ecx, (%rdi)	/ try to stick it in
111	jne	1b
112	movl	%ecx, %eax	/ return new value
113	ret
114	SET_SIZE(atomic_inc_uint_nv)
115	SET_SIZE(atomic_inc_32_nv)
116
117	ENTRY(atomic_inc_64_nv)
118	ALTENTRY(atomic_inc_ulong_nv)
	/ Atomically increment *target and return the new 64-bit value.
119	movq	(%rdi), %rax	/ %rax = old value
1201:
121	leaq	1(%rax), %rcx	/ %rcx = new value
122	lock
123	cmpxchgq %rcx, (%rdi)	/ try to stick it in
124	jne	1b
125	movq	%rcx, %rax	/ return new value
126	ret
127	SET_SIZE(atomic_inc_ulong_nv)
128	SET_SIZE(atomic_inc_64_nv)
129
130	ENTRY(atomic_dec_8)
131	ALTENTRY(atomic_dec_uchar)
	/ atomic_dec_8(uint8_t *target): atomically decrement *target (%rdi)
132	lock
133	decb	(%rdi)
134	ret
135	SET_SIZE(atomic_dec_uchar)
136	SET_SIZE(atomic_dec_8)
137
138	ENTRY(atomic_dec_16)
139	ALTENTRY(atomic_dec_ushort)
	/ atomic_dec_16(uint16_t *target): atomically decrement *target
140	lock
141	decw	(%rdi)
142	ret
143	SET_SIZE(atomic_dec_ushort)
144	SET_SIZE(atomic_dec_16)
145
146	ENTRY(atomic_dec_32)
147	ALTENTRY(atomic_dec_uint)
	/ atomic_dec_32(uint32_t *target): atomically decrement *target
148	lock
149	decl	(%rdi)
150	ret
151	SET_SIZE(atomic_dec_uint)
152	SET_SIZE(atomic_dec_32)
153
154	ENTRY(atomic_dec_64)
155	ALTENTRY(atomic_dec_ulong)
	/ atomic_dec_64(uint64_t *target): atomically decrement *target
156	lock
157	decq	(%rdi)
158	ret
159	SET_SIZE(atomic_dec_ulong)
160	SET_SIZE(atomic_dec_64)
161
162	ENTRY(atomic_dec_8_nv)
163	ALTENTRY(atomic_dec_uchar_nv)
	/ Atomically decrement *target and return the new 8-bit value.
	/ cmpxchg loop: on failure, cmpxchgb reloads %al with the current
	/ memory value, so the loop retries with a fresh "old" value.
164	movb	(%rdi), %al	/ %al = old value
1651:
166	leaq	-1(%rax), %rcx	/ %cl = new value
167	lock
168	cmpxchgb %cl, (%rdi)	/ try to stick it in
169	jne	1b
170	movzbl	%cl, %eax	/ return new value
171	ret
172	SET_SIZE(atomic_dec_uchar_nv)
173	SET_SIZE(atomic_dec_8_nv)
174
175	ENTRY(atomic_dec_16_nv)
176	ALTENTRY(atomic_dec_ushort_nv)
	/ Atomically decrement *target and return the new 16-bit value.
177	movw	(%rdi), %ax	/ %ax = old value
1781:
179	leaq	-1(%rax), %rcx	/ %cx = new value
180	lock
181	cmpxchgw %cx, (%rdi)	/ try to stick it in
182	jne	1b
183	movzwl	%cx, %eax	/ return new value
184	ret
185	SET_SIZE(atomic_dec_ushort_nv)
186	SET_SIZE(atomic_dec_16_nv)
187
188	ENTRY(atomic_dec_32_nv)
189	ALTENTRY(atomic_dec_uint_nv)
	/ Atomically decrement *target and return the new 32-bit value.
190	movl	(%rdi), %eax	/ %eax = old value
1911:
192	leaq	-1(%rax), %rcx	/ %ecx = new value
193	lock
194	cmpxchgl %ecx, (%rdi)	/ try to stick it in
195	jne	1b
196	movl	%ecx, %eax	/ return new value
197	ret
198	SET_SIZE(atomic_dec_uint_nv)
199	SET_SIZE(atomic_dec_32_nv)
200
201	ENTRY(atomic_dec_64_nv)
202	ALTENTRY(atomic_dec_ulong_nv)
	/ Atomically decrement *target and return the new 64-bit value.
203	movq	(%rdi), %rax	/ %rax = old value
2041:
205	leaq	-1(%rax), %rcx	/ %rcx = new value
206	lock
207	cmpxchgq %rcx, (%rdi)	/ try to stick it in
208	jne	1b
209	movq	%rcx, %rax	/ return new value
210	ret
211	SET_SIZE(atomic_dec_ulong_nv)
212	SET_SIZE(atomic_dec_64_nv)
213
214	ENTRY(atomic_add_8)
215	ALTENTRY(atomic_add_char)
	/ atomic_add_8(uint8_t *target, int8_t delta):
	/ atomically add delta (%sil) to *target (%rdi)
216	lock
217	addb	%sil, (%rdi)
218	ret
219	SET_SIZE(atomic_add_char)
220	SET_SIZE(atomic_add_8)
221
222	ENTRY(atomic_add_16)
223	ALTENTRY(atomic_add_short)
	/ atomic_add_16(uint16_t *target, int16_t delta)
224	lock
225	addw	%si, (%rdi)
226	ret
227	SET_SIZE(atomic_add_short)
228	SET_SIZE(atomic_add_16)
229
230	ENTRY(atomic_add_32)
231	ALTENTRY(atomic_add_int)
	/ atomic_add_32(uint32_t *target, int32_t delta)
232	lock
233	addl	%esi, (%rdi)
234	ret
235	SET_SIZE(atomic_add_int)
236	SET_SIZE(atomic_add_32)
237
238	ENTRY(atomic_add_64)
239	ALTENTRY(atomic_add_ptr)
240	ALTENTRY(atomic_add_long)
	/ atomic_add_64(uint64_t *target, int64_t delta);
	/ also serves the pointer- and long-sized variants on amd64
241	lock
242	addq	%rsi, (%rdi)
243	ret
244	SET_SIZE(atomic_add_long)
245	SET_SIZE(atomic_add_ptr)
246	SET_SIZE(atomic_add_64)
247
248	ENTRY(atomic_or_8)
249	ALTENTRY(atomic_or_uchar)
	/ atomic_or_8(uint8_t *target, uint8_t bits):
	/ atomically *target |= bits (%sil)
250	lock
251	orb	%sil, (%rdi)
252	ret
253	SET_SIZE(atomic_or_uchar)
254	SET_SIZE(atomic_or_8)
255
256	ENTRY(atomic_or_16)
257	ALTENTRY(atomic_or_ushort)
	/ atomic_or_16(uint16_t *target, uint16_t bits)
258	lock
259	orw	%si, (%rdi)
260	ret
261	SET_SIZE(atomic_or_ushort)
262	SET_SIZE(atomic_or_16)
263
264	ENTRY(atomic_or_32)
265	ALTENTRY(atomic_or_uint)
	/ atomic_or_32(uint32_t *target, uint32_t bits)
266	lock
267	orl	%esi, (%rdi)
268	ret
269	SET_SIZE(atomic_or_uint)
270	SET_SIZE(atomic_or_32)
271
272	ENTRY(atomic_or_64)
273	ALTENTRY(atomic_or_ulong)
	/ atomic_or_64(uint64_t *target, uint64_t bits)
274	lock
275	orq	%rsi, (%rdi)
276	ret
277	SET_SIZE(atomic_or_ulong)
278	SET_SIZE(atomic_or_64)
279
280	ENTRY(atomic_and_8)
281	ALTENTRY(atomic_and_uchar)
	/ atomic_and_8(uint8_t *target, uint8_t bits):
	/ atomically *target &= bits (%sil)
282	lock
283	andb	%sil, (%rdi)
284	ret
285	SET_SIZE(atomic_and_uchar)
286	SET_SIZE(atomic_and_8)
287
288	ENTRY(atomic_and_16)
289	ALTENTRY(atomic_and_ushort)
	/ atomic_and_16(uint16_t *target, uint16_t bits)
290	lock
291	andw	%si, (%rdi)
292	ret
293	SET_SIZE(atomic_and_ushort)
294	SET_SIZE(atomic_and_16)
295
296	ENTRY(atomic_and_32)
297	ALTENTRY(atomic_and_uint)
	/ atomic_and_32(uint32_t *target, uint32_t bits)
298	lock
299	andl	%esi, (%rdi)
300	ret
301	SET_SIZE(atomic_and_uint)
302	SET_SIZE(atomic_and_32)
303
304	ENTRY(atomic_and_64)
305	ALTENTRY(atomic_and_ulong)
	/ atomic_and_64(uint64_t *target, uint64_t bits)
306	lock
307	andq	%rsi, (%rdi)
308	ret
309	SET_SIZE(atomic_and_ulong)
310	SET_SIZE(atomic_and_64)
311
312	ENTRY(atomic_add_8_nv)
313	ALTENTRY(atomic_add_char_nv)
	/ Atomically add %sil to *target and return the new 8-bit value.
	/ cmpxchg loop: on failure, cmpxchgb reloads %al with the current
	/ memory value, so the loop retries with a fresh "old" value.
314	movb	(%rdi), %al	/ %al = old value
3151:
316	movb	%sil, %cl
317	addb	%al, %cl	/ %cl = new value
318	lock
319	cmpxchgb %cl, (%rdi)	/ try to stick it in
320	jne	1b
321	movzbl	%cl, %eax	/ return new value
322	ret
323	SET_SIZE(atomic_add_char_nv)
324	SET_SIZE(atomic_add_8_nv)
325
326	ENTRY(atomic_add_16_nv)
327	ALTENTRY(atomic_add_short_nv)
	/ Atomically add %si to *target and return the new 16-bit value.
328	movw	(%rdi), %ax	/ %ax = old value
3291:
330	movw	%si, %cx
331	addw	%ax, %cx	/ %cx = new value
332	lock
333	cmpxchgw %cx, (%rdi)	/ try to stick it in
334	jne	1b
335	movzwl	%cx, %eax	/ return new value
336	ret
337	SET_SIZE(atomic_add_short_nv)
338	SET_SIZE(atomic_add_16_nv)
339
340	ENTRY(atomic_add_32_nv)
341	ALTENTRY(atomic_add_int_nv)
	/ Atomically add %esi to *target and return the new 32-bit value.
342	movl	(%rdi), %eax	/ %eax = old value
3431:
344	movl	%esi, %ecx
345	addl	%eax, %ecx	/ %ecx = new value
346	lock
347	cmpxchgl %ecx, (%rdi)	/ try to stick it in
348	jne	1b
349	movl	%ecx, %eax	/ return new value
350	ret
351	SET_SIZE(atomic_add_int_nv)
352	SET_SIZE(atomic_add_32_nv)
353
354	ENTRY(atomic_add_64_nv)
355	ALTENTRY(atomic_add_ptr_nv)
356	ALTENTRY(atomic_add_long_nv)
	/ Atomically add %rsi to *target and return the new 64-bit value;
	/ also serves the pointer- and long-sized variants on amd64.
357	movq	(%rdi), %rax	/ %rax = old value
3581:
359	movq	%rsi, %rcx
360	addq	%rax, %rcx	/ %rcx = new value
361	lock
362	cmpxchgq %rcx, (%rdi)	/ try to stick it in
363	jne	1b
364	movq	%rcx, %rax	/ return new value
365	ret
366	SET_SIZE(atomic_add_long_nv)
367	SET_SIZE(atomic_add_ptr_nv)
368	SET_SIZE(atomic_add_64_nv)
369
370	ENTRY(atomic_and_8_nv)
371	ALTENTRY(atomic_and_uchar_nv)
	/ Atomically AND %sil into *target and return the new 8-bit value.
	/ cmpxchg loop: on failure, cmpxchgb reloads %al with the current
	/ memory value, so the loop retries with a fresh "old" value.
372	movb	(%rdi), %al	/ %al = old value
3731:
374	movb	%sil, %cl
375	andb	%al, %cl	/ %cl = new value
376	lock
377	cmpxchgb %cl, (%rdi)	/ try to stick it in
378	jne	1b
379	movzbl	%cl, %eax	/ return new value
380	ret
381	SET_SIZE(atomic_and_uchar_nv)
382	SET_SIZE(atomic_and_8_nv)
383
384	ENTRY(atomic_and_16_nv)
385	ALTENTRY(atomic_and_ushort_nv)
	/ Atomically AND %si into *target and return the new 16-bit value.
386	movw	(%rdi), %ax	/ %ax = old value
3871:
388	movw	%si, %cx
389	andw	%ax, %cx	/ %cx = new value
390	lock
391	cmpxchgw %cx, (%rdi)	/ try to stick it in
392	jne	1b
393	movzwl	%cx, %eax	/ return new value
394	ret
395	SET_SIZE(atomic_and_ushort_nv)
396	SET_SIZE(atomic_and_16_nv)
397
398	ENTRY(atomic_and_32_nv)
399	ALTENTRY(atomic_and_uint_nv)
	/ Atomically AND %esi into *target and return the new 32-bit value.
400	movl	(%rdi), %eax	/ %eax = old value
4011:
402	movl	%esi, %ecx
403	andl	%eax, %ecx	/ %ecx = new value
404	lock
405	cmpxchgl %ecx, (%rdi)	/ try to stick it in
406	jne	1b
407	movl	%ecx, %eax	/ return new value
408	ret
409	SET_SIZE(atomic_and_uint_nv)
410	SET_SIZE(atomic_and_32_nv)
411
412	ENTRY(atomic_and_64_nv)
413	ALTENTRY(atomic_and_ulong_nv)
	/ Atomically AND %rsi into *target and return the new 64-bit value.
414	movq	(%rdi), %rax	/ %rax = old value
4151:
416	movq	%rsi, %rcx
417	andq	%rax, %rcx	/ %rcx = new value
418	lock
419	cmpxchgq %rcx, (%rdi)	/ try to stick it in
420	jne	1b
421	movq	%rcx, %rax	/ return new value
422	ret
423	SET_SIZE(atomic_and_ulong_nv)
424	SET_SIZE(atomic_and_64_nv)
425
426	ENTRY(atomic_or_8_nv)
427	ALTENTRY(atomic_or_uchar_nv)
	/ Atomically OR %sil into *target and return the new 8-bit value.
	/ cmpxchg loop: on failure, cmpxchgb reloads %al with the current
	/ memory value, so the loop retries with a fresh "old" value.
428	movb	(%rdi), %al	/ %al = old value
4291:
430	movb	%sil, %cl
431	orb	%al, %cl	/ %cl = new value
432	lock
433	cmpxchgb %cl, (%rdi)	/ try to stick it in
434	jne	1b
435	movzbl	%cl, %eax	/ return new value
436	ret
	/ Fixed copy-paste bug: SET_SIZE previously named the atomic_and_*
	/ symbols, leaving atomic_or_8_nv/atomic_or_uchar_nv without ELF
	/ size information and re-sizing the and_8_nv symbols.
437	SET_SIZE(atomic_or_uchar_nv)
438	SET_SIZE(atomic_or_8_nv)
439
440	ENTRY(atomic_or_16_nv)
441	ALTENTRY(atomic_or_ushort_nv)
	/ Atomically OR %si into *target and return the new 16-bit value.
	/ cmpxchg loop: on failure, cmpxchgw reloads %ax with the current
	/ memory value, so the loop retries with a fresh "old" value.
442	movw	(%rdi), %ax	/ %ax = old value
4431:
444	movw	%si, %cx
445	orw	%ax, %cx	/ %cx = new value
446	lock
447	cmpxchgw %cx, (%rdi)	/ try to stick it in
448	jne	1b
449	movzwl	%cx, %eax	/ return new value
450	ret
451	SET_SIZE(atomic_or_ushort_nv)
452	SET_SIZE(atomic_or_16_nv)
453
454	ENTRY(atomic_or_32_nv)
455	ALTENTRY(atomic_or_uint_nv)
	/ Atomically OR %esi into *target and return the new 32-bit value.
456	movl	(%rdi), %eax	/ %eax = old value
4571:
458	movl	%esi, %ecx
459	orl	%eax, %ecx	/ %ecx = new value
460	lock
461	cmpxchgl %ecx, (%rdi)	/ try to stick it in
462	jne	1b
463	movl	%ecx, %eax	/ return new value
464	ret
465	SET_SIZE(atomic_or_uint_nv)
466	SET_SIZE(atomic_or_32_nv)
467
468	ENTRY(atomic_or_64_nv)
469	ALTENTRY(atomic_or_ulong_nv)
	/ Atomically OR %rsi into *target and return the new 64-bit value.
470	movq	(%rdi), %rax	/ %rax = old value
4711:
472	movq	%rsi, %rcx
473	orq	%rax, %rcx	/ %rcx = new value
474	lock
475	cmpxchgq %rcx, (%rdi)	/ try to stick it in
476	jne	1b
477	movq	%rcx, %rax	/ return new value
478	ret
479	SET_SIZE(atomic_or_ulong_nv)
480	SET_SIZE(atomic_or_64_nv)
481
482	ENTRY(atomic_cas_8)
483	ALTENTRY(atomic_cas_uchar)
	/ atomic_cas_8(uint8_t *target, uint8_t cmp, uint8_t new):
	/ if *target == cmp, store new; return the value *target held
	/ before the operation (cmpxchg leaves it in %al either way).
484	movzbl	%sil, %eax	/ %al = cmp
485	lock
486	cmpxchgb %dl, (%rdi)
487	ret
488	SET_SIZE(atomic_cas_uchar)
489	SET_SIZE(atomic_cas_8)
490
491	ENTRY(atomic_cas_16)
492	ALTENTRY(atomic_cas_ushort)
	/ atomic_cas_16(uint16_t *target, uint16_t cmp, uint16_t new)
493	movzwl	%si, %eax	/ %ax = cmp
494	lock
495	cmpxchgw %dx, (%rdi)
496	ret
497	SET_SIZE(atomic_cas_ushort)
498	SET_SIZE(atomic_cas_16)
499
500	ENTRY(atomic_cas_32)
501	ALTENTRY(atomic_cas_uint)
	/ atomic_cas_32(uint32_t *target, uint32_t cmp, uint32_t new)
502	movl	%esi, %eax	/ %eax = cmp
503	lock
504	cmpxchgl %edx, (%rdi)
505	ret
506	SET_SIZE(atomic_cas_uint)
507	SET_SIZE(atomic_cas_32)
508
509	ENTRY(atomic_cas_64)
510	ALTENTRY(atomic_cas_ulong)
511	ALTENTRY(atomic_cas_ptr)
	/ atomic_cas_64(uint64_t *target, uint64_t cmp, uint64_t new);
	/ also serves the pointer- and long-sized variants on amd64
512	movq	%rsi, %rax	/ %rax = cmp
513	lock
514	cmpxchgq %rdx, (%rdi)
515	ret
516	SET_SIZE(atomic_cas_ptr)
517	SET_SIZE(atomic_cas_ulong)
518	SET_SIZE(atomic_cas_64)
519
520	ENTRY(atomic_swap_8)
521	ALTENTRY(atomic_swap_uchar)
	/ atomic_swap_8(uint8_t *target, uint8_t new):
	/ atomically store new into *target and return the old value.
	/ (xchg with a memory operand is implicitly locked; the explicit
	/ lock prefix is redundant but harmless.)
522	movzbl	%sil, %eax	/ %al = new value
523	lock
524	xchgb %al, (%rdi)	/ %al = old value
525	ret
526	SET_SIZE(atomic_swap_uchar)
527	SET_SIZE(atomic_swap_8)
528
529	ENTRY(atomic_swap_16)
530	ALTENTRY(atomic_swap_ushort)
	/ atomic_swap_16(uint16_t *target, uint16_t new)
531	movzwl	%si, %eax	/ %ax = new value
532	lock
533	xchgw %ax, (%rdi)	/ %ax = old value
534	ret
535	SET_SIZE(atomic_swap_ushort)
536	SET_SIZE(atomic_swap_16)
537
538	ENTRY(atomic_swap_32)
539	ALTENTRY(atomic_swap_uint)
	/ atomic_swap_32(uint32_t *target, uint32_t new)
540	movl	%esi, %eax	/ %eax = new value
541	lock
542	xchgl %eax, (%rdi)	/ %eax = old value
543	ret
544	SET_SIZE(atomic_swap_uint)
545	SET_SIZE(atomic_swap_32)
546
547	ENTRY(atomic_swap_64)
548	ALTENTRY(atomic_swap_ulong)
549	ALTENTRY(atomic_swap_ptr)
	/ atomic_swap_64(uint64_t *target, uint64_t new);
	/ also serves the pointer- and long-sized variants on amd64
550	movq	%rsi, %rax	/ %rax = new value
551	lock
552	xchgq %rax, (%rdi)	/ %rax = old value
553	ret
554	SET_SIZE(atomic_swap_ptr)
555	SET_SIZE(atomic_swap_ulong)
556	SET_SIZE(atomic_swap_64)
557
558	ENTRY(atomic_set_long_excl)
	/ atomic_set_long_excl(ulong_t *target, uint_t bit):
	/ atomically set bit %rsi of *target; return 0 if the bit was
	/ previously clear, -1 if it was already set (bts sets CF to the
	/ bit's prior value).
559	xorl	%eax, %eax		/ assume success: return 0
560	lock
561	btsq	%rsi, (%rdi)
562	jnc	1f			/ CF clear: bit was not set before
563	decl	%eax			/ return -1
5641:
565	ret
566	SET_SIZE(atomic_set_long_excl)
567
568	ENTRY(atomic_clear_long_excl)
	/ atomic_clear_long_excl(ulong_t *target, uint_t bit):
	/ atomically clear bit %rsi of *target; return 0 if the bit was
	/ previously set, -1 if it was already clear.
569	xorl	%eax, %eax		/ assume success: return 0
570	lock
571	btrq	%rsi, (%rdi)
572	jc	1f			/ CF set: bit was set before
573	decl	%eax			/ return -1
5741:
575	ret
576	SET_SIZE(atomic_clear_long_excl)
577
578#if !defined(_KERNEL)
579
580	/*
581	 * NOTE: membar_enter and membar_exit are identical routines.
582	 * We define them separately, instead of using ALTENTRY
583	 * definitions to alias them together, so that DTrace and
584	 * debuggers will see a unique address for each, allowing
585	 * more accurate tracing.
586	 */
587
588	ENTRY(membar_enter)
	/ Full memory barrier: orders all prior loads and stores before
	/ all subsequent ones.
589	mfence
590	ret
591	SET_SIZE(membar_enter)
592
593	ENTRY(membar_exit)
	/ Full memory barrier (same implementation as membar_enter; kept
	/ as a separate symbol for tracing -- see the note above).
594	mfence
595	ret
596	SET_SIZE(membar_exit)
597
598	ENTRY(membar_producer)
	/ Store barrier: orders prior stores before subsequent stores.
599	sfence
600	ret
601	SET_SIZE(membar_producer)
602
603	ENTRY(membar_consumer)
	/ Load barrier: orders prior loads before subsequent loads.
604	lfence
605	ret
606	SET_SIZE(membar_consumer)
607
608#endif	/* !_KERNEL */
609