/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright (c) 2004, 2010, Oracle and/or its affiliates. All rights reserved.
 */

	.file	"atomic.s"

#include <sys/asm_linkage.h>

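	/*
	 * <sys/asm_linkage.h> provides the ENTRY, ALTENTRY and SET_SIZE
	 * macros used below to declare global functions (with alternate
	 * entry points sharing one body) and to record their ELF symbol
	 * sizes, along with ANSI_PRAGMA_WEAK2 for the weak aliases below.
	 */
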
#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#endif

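	/*
	 * All routines below follow the System V AMD64 ABI: the target
	 * address arrives in %rdi, operands in %rsi (and %rdx for the
	 * compare-and-swap routines) or narrower aliases thereof, and
	 * return values leave in %rax/%eax/%ax/%al.  The lock prefix
	 * makes each read-modify-write instruction atomic with respect
	 * to other processors.
	 */
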
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	lock
	incb	(%rdi)
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)

	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	lock
	incw	(%rdi)
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)

	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	lock
	incl	(%rdi)
	ret
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)

	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_ulong)
	lock
	incq	(%rdi)
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_64)

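	/*
	 * The *_nv ("new value") variants below use xadd, which atomically
	 * adds the register to the memory operand while handing the old
	 * memory value back in the register; incrementing that old value
	 * once more produces the new value to return.
	 */
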
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	incb	%al		/ %al = 1
	lock
	  xaddb	%al, (%rdi)	/ %al = old value, (%rdi) = new value
	incb	%al		/ return new value
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)

	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	incw	%ax		/ %ax = 1
	lock
	  xaddw	%ax, (%rdi)	/ %ax = old value, (%rdi) = new value
	incw	%ax		/ return new value
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)

	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	xorl	%eax, %eax	/ %eax = 0
	incl	%eax		/ %eax = 1
	lock
	  xaddl	%eax, (%rdi)	/ %eax = old value, (%rdi) = new value
	incl	%eax		/ return new value
	ret
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)

	ENTRY(atomic_inc_64_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	xorq	%rax, %rax	/ %rax = 0
	incq	%rax		/ %rax = 1
	lock
	  xaddq	%rax, (%rdi)	/ %rax = old value, (%rdi) = new value
	incq	%rax		/ return new value
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_64_nv)

	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	lock
	decb	(%rdi)
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)

	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	lock
	decw	(%rdi)
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)

	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	lock
	decl	(%rdi)
	ret
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)

	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_ulong)
	lock
	decq	(%rdi)
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_64)

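	/*
	 * The decrement *_nv variants mirror the increments above: xadd a
	 * delta of -1, then decrement the old value it hands back.
	 */
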
	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	decb	%al		/ %al = -1
	lock
	  xaddb	%al, (%rdi)	/ %al = old value, (%rdi) = new value
	decb	%al		/ return new value
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)

	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	xorl	%eax, %eax	/ clear upper bits of %eax return register
	decw	%ax		/ %ax = -1
	lock
	  xaddw	%ax, (%rdi)	/ %ax = old value, (%rdi) = new value
	decw	%ax		/ return new value
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)

	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	xorl	%eax, %eax	/ %eax = 0
	decl	%eax		/ %eax = -1
	lock
	  xaddl	%eax, (%rdi)	/ %eax = old value, (%rdi) = new value
	decl	%eax		/ return new value
	ret
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)

	ENTRY(atomic_dec_64_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	xorq	%rax, %rax	/ %rax = 0
	decq	%rax		/ %rax = -1
	lock
	  xaddq	%rax, (%rdi)	/ %rax = old value, (%rdi) = new value
	decq	%rax		/ return new value
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_64_nv)

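	/*
	 * The plain add/or/and forms return nothing, so a single
	 * lock-prefixed instruction is all that is needed.
	 */
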
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	lock
	addb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)

	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	lock
	addw	%si, (%rdi)
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)

	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	lock
	addl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)

	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	lock
	addq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_64)

	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	lock
	orb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)

	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	lock
	orw	%si, (%rdi)
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)

	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	lock
	orl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)

	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_ulong)
	lock
	orq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_64)

	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	lock
	andb	%sil, (%rdi)
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)

	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	lock
	andw	%si, (%rdi)
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)

	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	lock
	andl	%esi, (%rdi)
	ret
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)

	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_ulong)
	lock
	andq	%rsi, (%rdi)
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_64)

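	/*
	 * For the add_*_nv variants the delta is copied into the return
	 * register first; xadd then leaves the old value in the operand
	 * register, and adding the two yields the new value.
	 */
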
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movzbl	%sil, %eax		/ %al = delta addend, clear upper bits
	lock
	  xaddb	%sil, (%rdi)		/ %sil = old value, (%rdi) = sum
	addb	%sil, %al		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)

	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movzwl	%si, %eax		/ %ax = delta addend, clear upper bits
	lock
	  xaddw	%si, (%rdi)		/ %si = old value, (%rdi) = sum
	addw	%si, %ax		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)

	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	mov	%esi, %eax		/ %eax = delta addend
	lock
	  xaddl	%esi, (%rdi)		/ %esi = old value, (%rdi) = sum
	add	%esi, %eax		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)

	ENTRY(atomic_add_64_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	mov	%rsi, %rax		/ %rax = delta addend
	lock
	  xaddq	%rsi, (%rdi)		/ %rsi = old value, (%rdi) = sum
	addq	%rsi, %rax		/ new value = original value + delta
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_64_nv)

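	/*
	 * x86 has no fetch-and-and or fetch-and-or instruction, so the
	 * and_*_nv and or_*_nv variants loop on compare-and-swap: read
	 * the old value, compute the new one, and attempt to install it
	 * with lock cmpxchg.  When cmpxchg fails it reloads the
	 * accumulator with the current memory value, so the loop retries
	 * without an explicit reload.
	 */
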
	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)

	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)

	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)

	ENTRY(atomic_and_64_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	andq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_64_nv)

	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movb	(%rdi), %al	/ %al = old value
1:
	movb	%sil, %cl
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%rdi)	/ try to stick it in
	jne	1b
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)

	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movw	(%rdi), %ax	/ %ax = old value
1:
	movw	%si, %cx
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%rdi)	/ try to stick it in
	jne	1b
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)

	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	movl	(%rdi), %eax	/ %eax = old value
1:
	movl	%esi, %ecx
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%rdi)	/ try to stick it in
	jne	1b
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)

	ENTRY(atomic_or_64_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movq	(%rdi), %rax	/ %rax = old value
1:
	movq	%rsi, %rcx
	orq	%rax, %rcx	/ %rcx = new value
	lock
	cmpxchgq %rcx, (%rdi)	/ try to stick it in
	jne	1b
	movq	%rcx, %rax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_64_nv)

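	/*
	 * cmpxchg compares the accumulator with the memory operand and,
	 * on a match, stores the new value; either way the old memory
	 * value ends up in the accumulator, which is exactly what
	 * atomic_cas_* must return.
	 */
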
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movzbl	%sil, %eax
	lock
	cmpxchgb %dl, (%rdi)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movzwl	%si, %eax
	lock
	cmpxchgw %dx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	movl	%esi, %eax
	lock
	cmpxchgl %edx, (%rdi)
	ret
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movq	%rsi, %rax
	lock
	cmpxchgq %rdx, (%rdi)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_64)

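	/*
	 * xchg with a memory operand is implicitly locked, so the
	 * explicit lock prefix below is redundant but harmless.
	 */
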
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movzbl	%sil, %eax
	lock
	xchgb %al, (%rdi)
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movzwl	%si, %eax
	lock
	xchgw %ax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	movl	%esi, %eax
	lock
	xchgl %eax, (%rdi)
	ret
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	ALTENTRY(atomic_swap_ulong)
	ALTENTRY(atomic_swap_ptr)
	movq	%rsi, %rax
	lock
	xchgq %rax, (%rdi)
	ret
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_64)

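	/*
	 * atomic_set_long_excl sets bit %rsi of the word at (%rdi); bts
	 * copies the bit's previous value into the carry flag, so CF set
	 * means the bit was already taken and -1 is returned, else 0.
	 * atomic_clear_long_excl is the symmetric case using btr, failing
	 * with -1 when the bit was already clear.
	 */
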
	ENTRY(atomic_set_long_excl)
	xorl	%eax, %eax
	lock
	btsq	%rsi, (%rdi)
	jnc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	xorl	%eax, %eax
	lock
	btrq	%rsi, (%rdi)
	jc	1f
	decl	%eax			/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * NOTE: membar_enter and membar_exit are identical routines.
	 * We define them separately, instead of using ALTENTRY
	 * definitions to alias them together, so that DTrace and
	 * debuggers will see a unique address for them, allowing
	 * more accurate tracing.
	 */

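	/*
	 * mfence orders all prior loads and stores against all later
	 * ones; sfence orders only stores and lfence only loads,
	 * matching the producer (store) and consumer (load) barrier
	 * semantics.
	 */
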
	ENTRY(membar_enter)
	mfence
	ret
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	mfence
	ret
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	sfence
	ret
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	lfence
	ret
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */