/*
 * Copyright 2009-2016 Samy Al Bahra.
 * Copyright 2013-2016 Olivier Houchard.
 * Copyright 2016 Alexey Kopytov.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef CK_PR_AARCH64_LSE_H
#define CK_PR_AARCH64_LSE_H

#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif

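/*
 * The primitives below are implemented with the AArch64 Large System
 * Extensions (LSE) atomic instructions: CASP/CAS for compare-and-swap,
 * SWP for fetch-and-store, and the LD<op>/ST<op> forms for arithmetic and
 * logical read-modify-write operations.
 */

/*
 * 128-bit (double-word) compare-and-swap built on CASP. The instruction
 * requires the compare and set pairs to occupy consecutive even/odd
 * register pairs, hence the explicit x0-x3 register bindings. The eor/orr
 * sequence leaves zero in tmp1 iff the observed pair matched the expected
 * pair, i.e. iff the swap was performed.
 */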
CK_CC_INLINE static bool
ck_pr_cas_64_2_value(uint64_t target[2], uint64_t compare[2], uint64_t set[2], uint64_t value[2])
{
	uint64_t tmp1;
	uint64_t tmp2;
	register uint64_t x0 __asm__ ("x0") = compare[0];
	register uint64_t x1 __asm__ ("x1") = compare[1];
	register uint64_t x2 __asm__ ("x2") = set[0];
	register uint64_t x3 __asm__ ("x3") = set[1];

	__asm__ __volatile__("casp %0, %1, %4, %5, [%6]\n"
			     "eor %2, %0, %7\n"
			     "eor %3, %1, %8\n"
			     "orr %2, %2, %3\n"
			     : "+&r" (x0), "+&r" (x1), "=&r" (tmp1), "=&r" (tmp2)
			     : "r" (x2), "r" (x3), "r" (target), "r" (compare[0]), "r" (compare[1])
			     : "memory");

	value[0] = x0;
	value[1] = x1;

	/* tmp1 is zero iff the pair in memory matched compare, i.e. the CAS succeeded. */
	return (tmp1 == 0);
}

CK_CC_INLINE static bool
ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
{
	return (ck_pr_cas_64_2_value(CK_CPP_CAST(uint64_t *, target),
				     CK_CPP_CAST(uint64_t *, compare),
				     CK_CPP_CAST(uint64_t *, set),
				     CK_CPP_CAST(uint64_t *, value)));
}

CK_CC_INLINE static bool
ck_pr_cas_64_2(uint64_t target[2], uint64_t compare[2], uint64_t set[2])
{
	register uint64_t x0 __asm__ ("x0") = compare[0];
	register uint64_t x1 __asm__ ("x1") = compare[1];
	register uint64_t x2 __asm__ ("x2") = set[0];
	register uint64_t x3 __asm__ ("x3") = set[1];

	__asm__ __volatile__("casp %0, %1, %2, %3, [%4]\n"
			     "eor %0, %0, %5\n"
			     "eor %1, %1, %6\n"
			     "orr %0, %0, %1\n"
			     : "+&r" (x0), "+&r" (x1)
			     : "r" (x2), "r" (x3), "r" (target), "r" (compare[0]), "r" (compare[1])
			     : "memory");

	/* x0 is zero iff the pair in memory matched compare, i.e. the CAS succeeded. */
	return (x0 == 0);
}

CK_CC_INLINE static bool
ck_pr_cas_ptr_2(void *target, void *compare, void *set)
{
	return (ck_pr_cas_64_2(CK_CPP_CAST(uint64_t *, target),
			       CK_CPP_CAST(uint64_t *, compare),
			       CK_CPP_CAST(uint64_t *, set)));
}

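/*
 * Single-word compare-and-swap built on the LSE CAS instruction. W selects
 * the size suffix ("", "h" or "b") and R the register width prefix ("" for
 * 64-bit X registers, "w" for 32-bit W registers). CAS writes the value it
 * observed in memory back into the first source register, so success is
 * detected by comparing that register against the expected value.
 */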
#define CK_PR_CAS(N, M, T, W, R)					\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N##_value(M *target, T compare, T set, M *value)	\
	{								\
		*(T *)value = compare;					\
		__asm__ __volatile__(					\
		    "cas" W " %" R "0, %" R "2, [%1]\n"			\
		    : "+&r" (*(T *)value)				\
		    : "r" (target),					\
		      "r" (set)						\
		    : "memory");					\
		return (*(T *)value == compare);			\
	}								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N(M *target, T compare, T set)			\
	{								\
		T previous = compare;					\
		__asm__ __volatile__(					\
		    "cas" W " %" R "0, %" R "2, [%1]\n"			\
		    : "+&r" (previous)					\
		    : "r" (target),					\
		      "r" (set)						\
		    : "memory");					\
		return (previous == compare);				\
	}

CK_PR_CAS(ptr, void, void *, "", "")

#define CK_PR_CAS_S(N, M, W, R)	CK_PR_CAS(N, M, M, W, R)
CK_PR_CAS_S(64, uint64_t, "", "")
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_CAS_S(double, double, "", "")
#endif
CK_PR_CAS_S(32, uint32_t, "", "w")
CK_PR_CAS_S(uint, unsigned int, "", "w")
CK_PR_CAS_S(int, int, "", "w")
CK_PR_CAS_S(16, uint16_t, "h", "w")
CK_PR_CAS_S(8, uint8_t, "b", "w")
CK_PR_CAS_S(short, short, "h", "w")
CK_PR_CAS_S(char, char, "b", "w")

#undef CK_PR_CAS_S
#undef CK_PR_CAS

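/*
 * Fetch-and-store built on the LSE SWP instruction: the new value is written
 * to memory and the previous contents are returned in a single atomic step.
 */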
#define CK_PR_FAS(N, M, T, W, R)				\
	CK_CC_INLINE static T					\
	ck_pr_fas_##N(M *target, T v)				\
	{							\
		T previous;					\
		__asm__ __volatile__(				\
		    "swp" W " %" R "2, %" R "0, [%1]\n"		\
		    : "=&r" (previous)				\
		    : "r" (target),				\
		      "r" (v)					\
		    : "memory");				\
		return (previous);				\
	}

CK_PR_FAS(64, uint64_t, uint64_t, "", "")
CK_PR_FAS(32, uint32_t, uint32_t, "", "w")
CK_PR_FAS(ptr, void, void *, "", "")
CK_PR_FAS(int, int, int, "", "w")
CK_PR_FAS(uint, unsigned int, unsigned int, "", "w")
CK_PR_FAS(16, uint16_t, uint16_t, "h", "w")
CK_PR_FAS(8, uint8_t, uint8_t, "b", "w")
CK_PR_FAS(short, short, short, "h", "w")
CK_PR_FAS(char, char, char, "b", "w")

#undef CK_PR_FAS

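/*
 * Unary increment, decrement and bitwise complement. The store-only LSE
 * forms (STADD, STEOR) discard the previous value, so the constant operand
 * is simply materialized in a scratch register (x0/w0, hence the clobber)
 * and applied to memory.
 */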
#define CK_PR_UNARY(O, N, M, T, I, W, R, S)			\
	CK_CC_INLINE static void				\
	ck_pr_##O##_##N(M *target)				\
	{							\
		__asm__ __volatile__(I "\n"			\
		    "st" S W " " R "0, [%0]\n"			\
		    :						\
		    : "r" (target)				\
		    : "x0", "memory");				\
		return;						\
	}

CK_PR_UNARY(inc, ptr, void, void *, "mov x0, 1", "", "x", "add")
CK_PR_UNARY(dec, ptr, void, void *, "mov x0, -1", "", "x", "add")
CK_PR_UNARY(not, ptr, void, void *, "mov x0, -1", "", "x", "eor")
CK_PR_UNARY(inc, 64, uint64_t, uint64_t, "mov x0, 1", "", "x", "add")
CK_PR_UNARY(dec, 64, uint64_t, uint64_t, "mov x0, -1", "", "x", "add")
CK_PR_UNARY(not, 64, uint64_t, uint64_t, "mov x0, -1", "", "x", "eor")

#define CK_PR_UNARY_S(S, T, W)					\
	CK_PR_UNARY(inc, S, T, T, "mov w0, 1", W, "w", "add")	\
	CK_PR_UNARY(dec, S, T, T, "mov w0, -1", W, "w", "add")	\
	CK_PR_UNARY(not, S, T, T, "mov w0, -1", W, "w", "eor")

CK_PR_UNARY_S(32, uint32_t, "")
CK_PR_UNARY_S(uint, unsigned int, "")
CK_PR_UNARY_S(int, int, "")
CK_PR_UNARY_S(16, uint16_t, "h")
CK_PR_UNARY_S(8, uint8_t, "b")
CK_PR_UNARY_S(short, short, "h")
CK_PR_UNARY_S(char, char, "b")

#undef CK_PR_UNARY_S
#undef CK_PR_UNARY

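/*
 * Binary and/add/or/sub/xor. These map onto the store-only LSE forms
 * STCLR, STADD, STSET and STEOR. STCLR clears the bits that are set in its
 * operand and there is no store-only subtract, so "and" first complements
 * the delta (mvn) and "sub" negates it (neg) before the store.
 */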
#define CK_PR_BINARY(O, N, M, T, S, W, R, I)			\
	CK_CC_INLINE static void				\
	ck_pr_##O##_##N(M *target, T delta)			\
	{							\
		__asm__ __volatile__(I "\n"			\
		    "st" S W " %" R "0, [%1]\n"			\
		    : "+&r" (delta)				\
		    : "r" (target)				\
		    : "memory");				\
		return;						\
	}

CK_PR_BINARY(and, ptr, void, uintptr_t, "clr", "", "", "mvn %0, %0")
CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "", "", "")
CK_PR_BINARY(or, ptr, void, uintptr_t, "set", "", "", "")
CK_PR_BINARY(sub, ptr, void, uintptr_t, "add", "", "", "neg %0, %0")
CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "", "", "")
CK_PR_BINARY(and, 64, uint64_t, uint64_t, "clr", "", "", "mvn %0, %0")
CK_PR_BINARY(add, 64, uint64_t, uint64_t, "add", "", "", "")
CK_PR_BINARY(or, 64, uint64_t, uint64_t, "set", "", "", "")
CK_PR_BINARY(sub, 64, uint64_t, uint64_t, "add", "", "", "neg %0, %0")
CK_PR_BINARY(xor, 64, uint64_t, uint64_t, "eor", "", "", "")

#define CK_PR_BINARY_S(S, T, W)						\
	CK_PR_BINARY(and, S, T, T, "clr", W, "w", "mvn %w0, %w0")	\
	CK_PR_BINARY(add, S, T, T, "add", W, "w", "")			\
	CK_PR_BINARY(or, S, T, T, "set", W, "w", "")			\
	CK_PR_BINARY(sub, S, T, T, "add", W, "w", "neg %w0, %w0")	\
	CK_PR_BINARY(xor, S, T, T, "eor", W, "w", "")

CK_PR_BINARY_S(32, uint32_t, "")
CK_PR_BINARY_S(uint, unsigned int, "")
CK_PR_BINARY_S(int, int, "")
CK_PR_BINARY_S(16, uint16_t, "h")
CK_PR_BINARY_S(8, uint8_t, "b")
CK_PR_BINARY_S(short, short, "h")
CK_PR_BINARY_S(char, char, "b")

#undef CK_PR_BINARY_S
#undef CK_PR_BINARY

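/*
 * Fetch-and-add built on the LSE LDADD instruction, which atomically adds
 * the delta to memory and returns the previous contents.
 */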
CK_CC_INLINE static void *
ck_pr_faa_ptr(void *target, uintptr_t delta)
{
	uintptr_t previous;

	__asm__ __volatile__(
	    "ldadd %2, %0, [%1]\n"
	    : "=r" (previous)
	    : "r" (target),
	      "r" (delta)
	    : "memory");

	return (void *)(previous);
}

CK_CC_INLINE static uint64_t
ck_pr_faa_64(uint64_t *target, uint64_t delta)
{
	uint64_t previous;

	__asm__ __volatile__(
	    "ldadd %2, %0, [%1]\n"
	    : "=r" (previous)
	    : "r" (target),
	      "r" (delta)
	    : "memory");

	return (previous);
}

#define CK_PR_FAA(S, T, W)					\
	CK_CC_INLINE static T					\
	ck_pr_faa_##S(T *target, T delta)			\
	{							\
		T previous;					\
		__asm__ __volatile__(				\
		    "ldadd" W " %w2, %w0, [%1]\n"		\
		    : "=r" (previous)				\
		    : "r" (target),				\
		      "r" (delta)				\
		    : "memory");				\
		return (previous);				\
	}

CK_PR_FAA(32, uint32_t, "")
CK_PR_FAA(uint, unsigned int, "")
CK_PR_FAA(int, int, "")
CK_PR_FAA(16, uint16_t, "h")
CK_PR_FAA(8, uint8_t, "b")
CK_PR_FAA(short, short, "h")
CK_PR_FAA(char, char, "b")

#undef CK_PR_FAA

#endif /* CK_PR_AARCH64_LSE_H */