xref: /linux/arch/x86/lib/atomic64_cx8_32.S (revision c532de5a67a70f8533d495f8f2aaa9a0491c3ad0)
1/* SPDX-License-Identifier: GPL-2.0-or-later */
2/*
3 * atomic64_t for 586+
4 *
5 * Copyright © 2010  Luca Barbieri
6 */
7
8#include <linux/linkage.h>
9#include <asm/alternative.h>
10
/*
 * read64 - atomically load the 64-bit value at (\reg) into %edx:%eax.
 *
 * cmpxchg8b compares %edx:%eax against the 8-byte memory operand and, on
 * mismatch, loads the current memory value into %edx:%eax.  By copying
 * %ebx/%ecx (the "new" value operands of cmpxchg8b) into %eax/%edx first,
 * the compare value equals the replacement value, so memory is left
 * unchanged whether the compare hits or misses — and %edx:%eax always
 * ends up holding the current 64-bit contents.
 *
 * Clobbers %eax, %edx and flags; \reg and %ebx/%ecx are preserved.
 */
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
18
/*
 * read64_nonatomic - load the 64-bit value at (\reg) into %edx:%eax as two
 * separate 32-bit loads.
 *
 * The two halves are NOT read atomically: a concurrent writer may be
 * observed between the loads.  Callers below use this only to obtain the
 * initial "expected old value" for a LOCK cmpxchg8b retry loop, which
 * detects a torn read as a compare failure and simply retries.
 */
.macro read64_nonatomic reg
	movl (\reg), %eax
	movl 4(\reg), %edx
.endm
23
/*
 * atomic64_read_cx8 - atomically read a 64-bit counter.
 *
 * In:  %ecx = pointer to the atomic64_t
 * Out: %edx:%eax = current 64-bit value
 *
 * Uses whatever the caller left in %ebx/%ecx as the cmpxchg8b
 * compare-equals-new scratch value (see read64); memory is not modified.
 */
SYM_FUNC_START(atomic64_read_cx8)
	read64 %ecx
	RET
SYM_FUNC_END(atomic64_read_cx8)
28
/*
 * atomic64_set_cx8 - atomically store a 64-bit value.
 *
 * In:  %esi       = pointer to the atomic64_t
 *      %ecx:%ebx  = new 64-bit value (high:low)
 *      %edx:%eax  = initial guess of the old value
 *
 * On a compare miss cmpxchg8b reloads %edx:%eax with the current memory
 * contents, so the loop converges in at most one retry per concurrent
 * modification.  Clobbers %eax, %edx and flags.
 */
SYM_FUNC_START(atomic64_set_cx8)
1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	RET
SYM_FUNC_END(atomic64_set_cx8)
38
/*
 * atomic64_xchg_cx8 - atomically exchange a 64-bit value.
 *
 * In:  %esi       = pointer to the atomic64_t
 *      %ecx:%ebx  = new 64-bit value (high:low)
 *      %edx:%eax  = initial guess of the old value
 * Out: %edx:%eax  = value that was previously in memory
 *
 * Same retry structure as atomic64_set_cx8, but LOCKed: the store must be
 * a single atomic read-modify-write so the returned old value is exact.
 */
SYM_FUNC_START(atomic64_xchg_cx8)
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	RET
SYM_FUNC_END(atomic64_xchg_cx8)
47
/*
 * addsub_return - generate atomic64_{add,sub}_return_cx8.
 *
 * \func = function name suffix, \ins/\insc = low/high-word instruction
 * pair (add/adc or sub/sbb).
 *
 * In:  %edx:%eax = 64-bit operand (high:low)
 *      %ecx      = pointer to the atomic64_t
 * Out: %edx:%eax = resulting 64-bit value after the operation
 *
 * All of %ebx, %esi, %edi, %ebp are callee-saved in the 32-bit ABI, hence
 * the four pushes.  The operand is parked in %edi:%esi and the pointer in
 * %ebp so that %ebx/%ecx are free for cmpxchg8b's "new value" operands
 * and %edx:%eax for its compare value.
 */
.macro addsub_return func ins insc
SYM_FUNC_START(atomic64_\func\()_return_cx8)
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	/* Torn reads are fine here: cmpxchg8b below retries on mismatch. */
	read64_nonatomic %ecx
1:
	movl %eax, %ebx
	movl %edx, %ecx
	/* %ecx:%ebx = old value (op) operand, computed in two halves */
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

/* label 10 has no local references in this file; apparently vestigial */
10:
	/* return the freshly stored value */
	movl %ebx, %eax
	movl %ecx, %edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	RET
SYM_FUNC_END(atomic64_\func\()_return_cx8)
.endm
79
/* Instantiate atomic64_add_return_cx8 and atomic64_sub_return_cx8 */
addsub_return add add adc
addsub_return sub sub sbb
82
/*
 * incdec_return - generate atomic64_{inc,dec}_return_cx8.
 *
 * \func = function name suffix, \ins/\insc = low/high-word instruction
 * pair (add/adc or sub/sbb) applied with immediates 1 and 0.
 *
 * In:  %esi      = pointer to the atomic64_t
 * Out: %edx:%eax = resulting 64-bit value after the +/-1
 *
 * Only %ebx needs saving; %ecx is caller-clobbered in the 32-bit ABI.
 */
.macro incdec_return func ins insc
SYM_FUNC_START(atomic64_\func\()_return_cx8)
	pushl %ebx

	/* Torn reads are fine: the cmpxchg8b loop below retries on mismatch. */
	read64_nonatomic %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	/* %ecx:%ebx = old value +/- 1, with carry/borrow into the high word */
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

/* label 10 has no local references in this file; apparently vestigial */
10:
	/* return the freshly stored value */
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	RET
SYM_FUNC_END(atomic64_\func\()_return_cx8)
.endm
104
/* Instantiate atomic64_inc_return_cx8 and atomic64_dec_return_cx8 */
incdec_return inc add adc
incdec_return dec sub sbb
107
/*
 * atomic64_dec_if_positive_cx8 - decrement a 64-bit counter unless the
 * result would be negative.
 *
 * In:  %esi      = pointer to the atomic64_t
 * Out: %edx:%eax = old value minus one; the store to memory only happens
 *                  when that result is non-negative (js skips the
 *                  cmpxchg8b when the high word's sign bit is set).
 *
 * Clobbers %ecx and flags; %ebx is saved/restored.
 */
SYM_FUNC_START(atomic64_dec_if_positive_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	/* %ecx:%ebx = old value - 1 (borrow propagated into the high word) */
	subl $1, %ebx
	sbbl $0, %ecx			/* 'l' suffix for consistency with subl/adcl above */
	js 2f				/* result negative: return it without storing */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

2:
	/* return the (possibly unstored) decremented value */
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	RET
SYM_FUNC_END(atomic64_dec_if_positive_cx8)
128
/*
 * atomic64_add_unless_cx8 - add to a 64-bit counter unless it currently
 * holds a given value.
 *
 * In:  %esi      = pointer to the atomic64_t
 *      %edx:%eax = amount to add (high:low)
 *      %edi:%ecx = "unless" comparison value (high:low), as held by the
 *                  caller on entry (pushed to the stack before %edi is
 *                  reused below)
 * Out: %eax      = 1 if the addition was performed, 0 if the counter
 *                  equalled the comparison value
 *
 * After the pushes: 0(%esp) = low compare word, 4(%esp) = high compare
 * word.  The addend is parked in %edi:%ebp, freeing %ebx/%ecx for
 * cmpxchg8b's new-value operands.
 */
SYM_FUNC_START(atomic64_add_unless_cx8)
	pushl %ebp
	pushl %ebx
/* these just push these two parameters on the stack */
	pushl %edi
	pushl %ecx

	movl %eax, %ebp
	movl %edx, %edi

	read64 %esi
1:
	/* current value (%edx:%eax) == compare value? low word first */
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	/* %ecx:%ebx = old value + addend */
	addl %ebp, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp			/* drop the two pushed compare words */
	popl %ebx
	popl %ebp
	RET
4:
	/* low words matched; only bail out if the high words match too */
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax			/* value == u: no add, return 0 */
	jmp 3b
SYM_FUNC_END(atomic64_add_unless_cx8)
164
/*
 * atomic64_inc_not_zero_cx8 - increment a 64-bit counter unless it is zero.
 *
 * In:  %esi = pointer to the atomic64_t
 * Out: %eax = 1 if the increment was performed, 0 if the counter was zero
 *
 * Note the zero-return path: the jz at 3f is taken only when
 * %eax|%edx == 0, so %eax is already 0 — no explicit clear is needed.
 * Clobbers %ecx, %edx and flags; %ebx is saved/restored.
 */
SYM_FUNC_START(atomic64_inc_not_zero_cx8)
	pushl %ebx

	read64 %esi
1:
	/* is the full 64-bit value zero? */
	movl %eax, %ecx
	orl %edx, %ecx
	jz 3f
	/* %ecx:%ebx = old value + 1: add 1 to the low word, then fold the
	 * carry plus the old high word into a zeroed %ecx */
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx
	adcl %edx, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	popl %ebx
	RET
SYM_FUNC_END(atomic64_inc_not_zero_cx8)
186