/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright (c) 2004, 2010, Oracle and/or its affiliates. All rights reserved.
 */

	.file	"atomic.s"

#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away the moment our closed
	 * bins no longer require them.
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#endif

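	/*
	 * The routines below implement the atomic_ops(3C) interfaces.
	 * Per the SysV AMD64 calling convention, the target address
	 * arrives in %rdi, any value argument in %rsi (%esi/%si/%sil
	 * for the narrower types), and return values leave in %rax
	 * (%eax/%ax/%al).
	 *
	 * atomic_inc_* increments *target by one as a single atomic
	 * operation; a rough sketch of the intended C semantics:
	 *
	 *	void
	 *	atomic_inc_32(volatile uint32_t *target)
	 *	{
	 *		(*target)++;	/ one lock-prefixed incl
	 *	}
	 */
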
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	lock
	incb	(%rdi)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	lock
	incw	(%rdi)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	lock
	incl	(%rdi)
	ret
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_ulong)
	lock
	incq	(%rdi)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_64)

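	/*
	 * The *_nv ("new value") variants must return the updated value.
	 * They rely on lock xadd, which atomically adds the register to
	 * memory and hands the old memory contents back in the register;
	 * bumping that once more recovers the new value. A sketch:
	 *
	 *	uint32_t
	 *	atomic_inc_32_nv(volatile uint32_t *target)
	 *	{
	 *		return (++(*target));	/ xadd, then incl %eax
	 *	}
	 */
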
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	incb	%al		/ %al = 1
	lock
	  xaddb	%al, (%rdi)	/ %al = old value, (%rdi) = new value
	incb	%al		/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	incw	%ax		/ %ax = 1
	lock
	  xaddw	%ax, (%rdi)	/ %ax = old value, (%rdi) = new value
	incw	%ax		/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	xorl	%eax, %eax	/ %eax = 0
	incl	%eax		/ %eax = 1
	lock
	  xaddl	%eax, (%rdi)	/ %eax = old value, (%rdi) = new value
	incl	%eax		/ return new value
	ret
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

	ENTRY(atomic_inc_64_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	xorq	%rax, %rax	/ %rax = 0
	incq	%rax		/ %rax = 1
	lock
	  xaddq	%rax, (%rdi)	/ %rax = old value, (%rdi) = new value
	incq	%rax		/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_64_nv)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	lock
	decb	(%rdi)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	lock
	decw	(%rdi)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	lock
	decl	(%rdi)
	ret
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_ulong)
	lock
	decq	(%rdi)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_64)

	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	decb	%al		/ %al = -1
	lock
	  xaddb	%al, (%rdi)	/ %al = old value, (%rdi) = new value
	decb	%al		/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	decw	%ax		/ %ax = -1
	lock
	  xaddw	%ax, (%rdi)	/ %ax = old value, (%rdi) = new value
	decw	%ax		/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	xorl	%eax, %eax	/ %eax = 0
	decl	%eax		/ %eax = -1
	lock
	  xaddl	%eax, (%rdi)	/ %eax = old value, (%rdi) = new value
	decl	%eax		/ return new value
	ret
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	ENTRY(atomic_dec_64_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	xorq	%rax, %rax	/ %rax = 0
	decq	%rax		/ %rax = -1
	lock
	  xaddq	%rax, (%rdi)	/ %rax = old value, (%rdi) = new value
	decq	%rax		/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_64_nv)

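	/*
	 * atomic_add_* adds a signed delta to *target atomically; a
	 * sketch of the semantics (see atomic_add(3C)):
	 *
	 *	void
	 *	atomic_add_32(volatile uint32_t *target, int32_t delta)
	 *	{
	 *		*target += delta;	/ one lock-prefixed addl
	 *	}
	 */
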
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	lock
	addb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	lock
	addw	%si, (%rdi)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	lock
	addl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	lock
	addq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_64)

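	/*
	 * atomic_or_* and atomic_and_* apply a bit mask to *target.
	 * Since x86 can or/and directly into memory, each is a single
	 * lock-prefixed instruction, roughly:
	 *
	 *	void
	 *	atomic_or_32(volatile uint32_t *target, uint32_t bits)
	 *	{
	 *		*target |= bits;	/ lock orl
	 *	}
	 */
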
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	lock
	orb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	lock
	orw	%si, (%rdi)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	lock
	orl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_ulong)
	lock
	orq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	lock
	andb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	lock
	andw	%si, (%rdi)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	lock
	andl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_ulong)
	lock
	andq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_64)

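	/*
	 * atomic_add_*_nv returns the sum. As with the inc/dec *_nv
	 * routines, lock xadd leaves the old value in the source
	 * register, so adding the saved delta to it recovers the sum:
	 *
	 *	uint32_t
	 *	atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
	 *	{
	 *		return (*target += delta);
	 *	}
	 */
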
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movzbl	%sil, %eax		/ %al = delta addend, clear upper bits
	lock
	  xaddb	%sil, (%rdi)		/ %sil = old value, (%rdi) = sum
	addb	%sil, %al		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movzwl	%si, %eax		/ %ax = delta addend, clear upper bits
	lock
	  xaddw	%si, (%rdi)		/ %si = old value, (%rdi) = sum
	addw	%si, %ax		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	mov	%esi, %eax		/ %eax = delta addend
	lock
	  xaddl	%esi, (%rdi)		/ %esi = old value, (%rdi) = sum
	add	%esi, %eax		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	ENTRY(atomic_add_64_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	mov	%rsi, %rax		/ %rax = delta addend
	lock
	  xaddq	%rsi, (%rdi)		/ %rsi = old value, (%rdi) = sum
	addq	%rsi, %rax		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_64_nv)

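	/*
	 * No x86 instruction both applies a bit mask to memory and
	 * returns the result, so the and/or *_nv variants spin on lock
	 * cmpxchg: compute the new value from a snapshot of the old,
	 * then try to install it. On failure cmpxchg reloads the
	 * current value into %al/%ax/%eax/%rax, and the loop retries
	 * with that fresher snapshot. Equivalent C, as a sketch:
	 *
	 *	uint32_t
	 *	atomic_and_32_nv(volatile uint32_t *target, uint32_t bits)
	 *	{
	 *		uint32_t old, new;
	 *		do {
	 *			old = *target;
	 *			new = old & bits;
	 *		} while (atomic_cas_32(target, old, new) != old);
	 *		return (new);
	 *	}
	 */
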
	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	movl	(%rdi), %eax
1:
	movl	%esi, %ecx
	andl	%eax, %ecx
	lock
	cmpxchgl %ecx, (%rdi)
	jne	1b
	movl	%ecx, %eax
	ret
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	ENTRY(atomic_and_64_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movq	(%rdi), %rax
1:
	movq	%rsi, %rcx
	andq	%rax, %rcx
	lock
	cmpxchgq %rcx, (%rdi)
	jne	1b
	movq	%rcx, %rax
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_64_nv)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	movl	(%rdi), %eax
1:
	movl	%esi, %ecx
	orl	%eax, %ecx
	lock
	cmpxchgl %ecx, (%rdi)
	jne	1b
	movl	%ecx, %eax
	ret
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	ENTRY(atomic_or_64_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movq	(%rdi), %rax
1:
	movq	%rsi, %rcx
	orq	%rax, %rcx
	lock
	cmpxchgq %rcx, (%rdi)
	jne	1b
	movq	%rcx, %rax
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_64_nv)

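	/*
	 * atomic_cas_* compares *target with cmp and stores newval only
	 * if they match; the old value is returned either way, which is
	 * how callers detect success. cmpxchg takes its comparand in
	 * %eax/%rax, conveniently also the return register:
	 *
	 *	uint32_t
	 *	atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
	 *	    uint32_t newval)
	 *	{
	 *		uint32_t old = *target;
	 *		if (old == cmp)
	 *			*target = newval;
	 *		return (old);	/ all of this, atomically
	 *	}
	 */
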
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movzbl	%sil, %eax
	lock
	cmpxchgb %dl, (%rdi)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movzwl	%si, %eax
	lock
	cmpxchgw %dx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	movl	%esi, %eax
	lock
	cmpxchgl %edx, (%rdi)
	ret
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movq	%rsi, %rax
	lock
	cmpxchgq %rdx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_64)

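	/*
	 * atomic_swap_* stores the new value and returns the old one.
	 * Note that xchg with a memory operand asserts the bus lock
	 * implicitly, so the explicit lock prefix below is redundant
	 * but harmless. A sketch:
	 *
	 *	uint32_t
	 *	atomic_swap_32(volatile uint32_t *target, uint32_t new)
	 *	{
	 *		uint32_t old = *target;
	 *		*target = new;	/ both steps in one xchgl
	 *		return (old);
	 *	}
	 */
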
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movzbl	%sil, %eax
	lock
	xchgb %al, (%rdi)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movzwl	%si, %eax
	lock
	xchgw %ax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	movl	%esi, %eax
	lock
	xchgl %eax, (%rdi)
	ret
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	ALTENTRY(atomic_swap_ulong)
	ALTENTRY(atomic_swap_ptr)
	movq	%rsi, %rax
	lock
	xchgq %rax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_64)

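	/*
	 * atomic_set_long_excl() atomically sets bit 'bit' of *target,
	 * returning 0 on success and -1 if the bit was already set;
	 * lock btsq leaves the bit's old state in the carry flag.
	 * atomic_clear_long_excl() is the mirror image using btrq.
	 * A sketch of the set case:
	 *
	 *	int
	 *	atomic_set_long_excl(volatile ulong_t *target, uint_t bit)
	 *	{
	 *		ulong_t mask = 1UL << bit;
	 *		if (*target & mask)	/ test and set happen
	 *			return (-1);	/ as one lock btsq
	 *		*target |= mask;
	 *		return (0);
	 *	}
	 */
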
	ENTRY(atomic_set_long_excl)
	xorl	%eax, %eax
	lock
	btsq	%rsi, (%rdi)
	jnc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	xorl	%eax, %eax
	lock
	btrq	%rsi, (%rdi)
	jc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * NOTE: membar_enter and membar_exit are identical routines.
	 * We define them separately, instead of using ALTENTRY
	 * definitions to alias them together, so that DTrace and
	 * debuggers will see a unique address for them, allowing
	 * more accurate tracing.
	 */

	ENTRY(membar_enter)
	mfence
	ret
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	mfence
	ret
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	sfence
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lfence
	ret
	SET_SIZE(membar_consumer)

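	/*
	 * A typical pairing of the producer/consumer barriers, as a
	 * sketch (see membar_ops(3C); 'data' and 'ready' stand in for
	 * any shared variables):
	 *
	 *	data = value;		/ producer side
	 *	membar_producer();	/ data visible before flag
	 *	ready = 1;
	 *
	 *	while (ready == 0)	/ consumer side
	 *		continue;
	 *	membar_consumer();	/ flag seen before data is read
	 *	consume(data);
	 */
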
#endif	/* !_KERNEL */
