/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

/*
 * Copyright 2019 Joyent, Inc.
 */

#ifndef _THR_INLINES_H
#define	_THR_INLINES_H

#include <sys/ccompile.h>

#if !defined(__lint) && defined(__GNUC__)

/* inlines for gcc */

/*
 * ON-usable GCC 4.x emits register pseudo-ops declaring %g7 as ignored, rather
 * than scratch; GCC 3 does the reverse.  All uses, both the ones it generated
 * (_curthread) and the ones it didn't (__curthread), must agree.
 */
#if __GNUC__ > 3
#define	SPARC_REG_SPEC	"#ignore"
#else
#define	SPARC_REG_SPEC	"#scratch"
#endif

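/*
 * Return the calling thread's ulwp_t pointer: the self-pointer stored at
 * offset 0 of the segment addressed by %fs (amd64) or %gs (i386), or the
 * contents of the thread register %g7 on SPARC.
 */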
extern __GNU_INLINE ulwp_t *
_curthread(void)
{
#if defined(__amd64)
	ulwp_t *__value;
	__asm__ __volatile__("movq %%fs:0, %0" : "=r" (__value));
#elif defined(__i386)
	ulwp_t *__value;
	__asm__ __volatile__("movl %%gs:0, %0" : "=r" (__value));
#elif defined(__sparc)
	register ulwp_t *__value __asm__("g7");
#else
#error	"port me"
#endif
	return (__value);
}

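/*
 * Variant of _curthread().  On x86 this is the same segment-relative load;
 * on SPARC it fetches the ulwp_t pointer with a load through %g7 rather than
 * returning %g7 directly, and emits the .register pseudo-op discussed above
 * so the assembler accepts the explicit %g7 reference.
 */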
extern __GNU_INLINE ulwp_t *
__curthread(void)
{
	ulwp_t *__value;
	__asm__ __volatile__(
#if defined(__amd64)
	    "movq %%fs:0, %0\n\t"
#elif defined(__i386)
	    "movl %%gs:0, %0\n\t"
#elif defined(__sparcv9)
	    ".register %%g7, " SPARC_REG_SPEC "\n\t"
	    "ldx [%%g7 + 80], %0\n\t"
#elif defined(__sparc)
	    ".register %%g7, " SPARC_REG_SPEC "\n\t"
	    "ld [%%g7 + 80], %0\n\t"
#else
#error	"port me"
#endif
	    : "=r" (__value));
	return (__value);
}

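/*
 * Return the current stack pointer (%rsp, %esp, or %sp) as a greg_t.
 */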
extern __GNU_INLINE greg_t
stkptr(void)
{
#if defined(__amd64)
	register greg_t __value __asm__("rsp");
#elif defined(__i386)
	register greg_t __value __asm__("esp");
#elif defined(__sparc)
	register greg_t __value __asm__("sp");
#else
#error	"port me"
#endif
	return (__value);
}

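/*
 * Read the high-resolution timer by issuing the gethrtime fast trap directly
 * (software interrupt 0xd2 with %eax = 3 on x86, trap 0x24 on SPARC),
 * avoiding an ordinary function call.  The trap follows the fast-trap
 * calling convention, which is why the clobber lists below name the
 * caller-saved registers.
 */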
extern __GNU_INLINE hrtime_t
gethrtime(void)	/* note: caller-saved registers are trashed */
{
#if defined(__amd64)
	hrtime_t __value;
	__asm__ __volatile__(
	    "movl $3, %%eax\n\t"
	    "int $0xd2"
	    : "=a" (__value)
	    : : "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11", "cc");
#elif defined(__i386)
	hrtime_t __value;
	__asm__ __volatile__(
	    "movl $3, %%eax\n\t"
	    "int $0xd2"
	    : "=A" (__value)
	    : : "ecx", "cc");
#elif defined(__sparcv9)
	register hrtime_t __value __asm__("o0");
	__asm__ __volatile__(
	    "ta 0x24\n\t"
	    "sllx %%o0, 32, %0\n\t"
	    "or %%o1, %0, %0"
	    : "=r" (__value)
	    : : "o1", "o2", "o3", "o4", "o5", "cc");
#elif defined(__sparc)
	register hrtime_t __value __asm__("o0");
	__asm__ __volatile__(
	    "ta 0x24"
	    : "=r" (__value)
	    : : "o2", "o3", "o4", "o5", "cc");
#else
#error	"port me"
#endif
	return (__value);
}

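/*
 * Set the lock byte to a non-zero value and return its previous contents:
 * an implicitly locked xchgb on x86, ldstub followed by a membar on SPARC.
 */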
extern __GNU_INLINE int
set_lock_byte(volatile uint8_t *__lockp)
{
	int __value = 0;
#if defined(__x86)
	__asm__ __volatile__(
	    "movl $1, %0\n\t"
	    "xchgb %%dl, %1"
	    : "+d" (__value), "+m" (*__lockp));
#elif defined(__sparc)
	__asm__ __volatile__(
	    "ldstub %1, %0\n\t"
	    "membar #LoadLoad"
	    : "=r" (__value), "+m" (*__lockp));
#else
#error	"port me"
#endif
	return (__value);
}

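/*
 * Atomically exchange *__memory with __value and return the old contents.
 * x86 uses xchgl (implicitly locked); SPARC retries a cas loop until the
 * swap succeeds.
 */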
extern __GNU_INLINE uint32_t
atomic_swap_32(volatile uint32_t *__memory, uint32_t __value)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "xchgl %0, %1"
	    : "+q" (__value), "+m" (*__memory));
	return (__value);
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "mov %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    " mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__value)
	    : "cc");
	return (__tmp2);
#else
#error	"port me"
#endif
}

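/*
 * Atomic compare-and-swap: if *__memory equals __cmp, store __newvalue.
 * Either way, return the value that was previously in memory.
 */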
extern __GNU_INLINE uint32_t
atomic_cas_32(volatile uint32_t *__memory, uint32_t __cmp, uint32_t __newvalue)
{
	uint32_t __oldvalue;
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; cmpxchgl %3, %0"
	    : "=m" (*__memory), "=a" (__oldvalue)
	    : "a" (__cmp), "r" (__newvalue));
#elif defined(__sparc)
	__asm__ __volatile__(
	    "cas [%2], %3, %1"
	    : "=m" (*__memory), "=&r" (__oldvalue)
	    : "r" (__memory), "r" (__cmp), "1" (__newvalue));
#else
#error	"port me"
#endif
	return (__oldvalue);
}

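/*
 * atomic_inc_32(), atomic_dec_32(), atomic_and_32() and atomic_or_32()
 * update a 32-bit word in place with no return value: locked
 * read-modify-write instructions on x86, cas retry loops on SPARC.
 */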
extern __GNU_INLINE void
atomic_inc_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; incl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "add %0, 1, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    " mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error	"port me"
#endif
}

extern __GNU_INLINE void
atomic_dec_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; decl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "sub %0, 1, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    " mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error	"port me"
#endif
}

extern __GNU_INLINE void
atomic_and_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; andl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "and %0, %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    " mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error	"port me"
#endif
}

extern __GNU_INLINE void
atomic_or_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; orl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "or %0, %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    " mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error	"port me"
#endif
}

#if defined(__sparc)	/* only needed on sparc */

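/*
 * Return the contents of the return-address register %i7 (the address of
 * the call instruction in our caller).
 */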
extern __GNU_INLINE ulong_t
caller(void)
{
	register ulong_t __value __asm__("i7");
	return (__value);
}

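/*
 * Return the current frame pointer (%fp).
 */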
extern __GNU_INLINE ulong_t
getfp(void)
{
	register ulong_t __value __asm__("fp");
	return (__value);
}

#endif	/* __sparc */

#if defined(__x86)	/* only needed on x86 */

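/*
 * Spin-loop hint: "rep; nop" is the encoding of the PAUSE instruction,
 * which executes as a plain nop on processors that do not implement it.
 */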
extern __GNU_INLINE void
ht_pause(void)
{
	__asm__ __volatile__("rep; nop");
}

#endif	/* __x86 */

#endif	/* !__lint && __GNUC__ */

#endif	/* _THR_INLINES_H */