/* xref: /illumos-gate/usr/src/lib/libc/inc/thr_inlines.h (revision 1170833fec62773f21c42c8b6be7af4860f91451) */
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 /*
28  * Copyright 2019 Joyent, Inc.
29  */
30 
31 #ifndef _THR_INLINES_H
32 #define	_THR_INLINES_H
33 
34 #include <sys/ccompile.h>
35 
36 #if !defined(__lint) && defined(__GNUC__)
37 
38 /* inlines for gcc */
39 
40 /*
41  * ON-usable GCC 4.x emits register pseudo-ops declaring %g7 as ignored, rather
42  * than scratch, GCC 3 does the reverse.  All uses, both ones it generated
43  * (_curthread) and ones it didn't (__curthread) must agree.
44  */
45 #if __GNUC__ > 3
46 #define	SPARC_REG_SPEC	"#ignore"
47 #else
48 #define	SPARC_REG_SPEC	"#scratch"
49 #endif
50 
/*
 * Return the current thread's ulwp_t pointer.
 *
 * On x86 the pointer is loaded from offset 0 of the thread's segment
 * register (%fs on amd64, %gs on i386).  On sparc, %g7 itself holds
 * the ulwp_t address, so we simply hand the register back (the
 * "register ... __asm__" binding reads %g7 directly; see the
 * SPARC_REG_SPEC note above for how %g7 is declared to the assembler).
 */
extern __GNU_INLINE ulwp_t *
_curthread(void)
{
#if defined(__amd64)
	ulwp_t *__value;
	/* ulwp_t self-pointer lives at %fs:0 on amd64 */
	__asm__ __volatile__("movq %%fs:0, %0" : "=r" (__value));
#elif defined(__i386)
	/* same layout, but the 32-bit thread segment is %gs */
	ulwp_t *__value;
	__asm__ __volatile__("movl %%gs:0, %0" : "=r" (__value));
#elif defined(__sparc)
	/* %g7 is dedicated to the current-thread pointer on sparc */
	register ulwp_t *__value __asm__("g7");
#else
#error	"port me"
#endif
	return (__value);
}
67 
/*
 * Like _curthread(), but on sparc it loads the value stored at
 * [%g7 + 80] instead of returning %g7 itself.
 * NOTE(review): presumably offset 80 is a self-pointer field inside
 * the ulwp_t that is NULL until libc thread initialization, letting
 * callers distinguish "no thread yet" — confirm against the ulwp_t
 * layout in thr_uberdata.h.  On x86 this is identical to _curthread().
 */
extern __GNU_INLINE ulwp_t *
__curthread(void)
{
	ulwp_t *__value;
	__asm__ __volatile__(
#if defined(__amd64)
	    "movq %%fs:0, %0\n\t"
#elif defined(__i386)
	    "movl %%gs:0, %0\n\t"
#elif defined(__sparcv9)
	    /* declare %g7 to the assembler (see SPARC_REG_SPEC above) */
	    ".register %%g7, " SPARC_REG_SPEC "\n\t"
	    "ldx [%%g7 + 80], %0\n\t"
#elif defined(__sparc)
	    ".register %%g7, " SPARC_REG_SPEC "\n\t"
	    "ld [%%g7 + 80], %0\n\t"
#else
#error	"port me"
#endif
	    : "=r" (__value));
	return (__value);
}
89 
/*
 * Return the caller's current stack pointer as a greg_t.
 * Implemented by binding a local variable directly to the stack
 * pointer register for each architecture.
 */
extern __GNU_INLINE greg_t
stkptr(void)
{
#if defined(__amd64)
	register greg_t __value __asm__("rsp");
#elif defined(__i386)
	register greg_t __value __asm__("esp");
#elif defined(__sparc)
	register greg_t __value __asm__("sp");
#else
#error	"port me"
#endif
	return (__value);
}
104 
/*
 * Atomically set the lock byte *__lockp and return its previous
 * contents; a zero return means the caller acquired the lock.
 *
 * Note the stored "locked" value differs by architecture (1 on x86
 * via xchgb, 0xFF on sparc via ldstub); callers must only test the
 * return for zero/non-zero, not for a particular value.
 */
extern __GNU_INLINE int
set_lock_byte(volatile uint8_t *__lockp)
{
	int __value = 0;
#if defined(__x86)
	/*
	 * Load 1 into %edx, then exchange its low byte with the lock
	 * byte.  xchg with a memory operand is implicitly locked, so
	 * no explicit lock prefix or fence is needed.  The upper 24
	 * bits of __value remain 0, so the return is just the old byte.
	 */
	__asm__ __volatile__(
	    "movl $1, %0\n\t"
	    "xchgb %%dl, %1"
	    : "+d" (__value), "+m" (*__lockp));
#elif defined(__sparc)
	/*
	 * ldstub atomically stores 0xFF and yields the old byte; the
	 * membar orders subsequent loads after the lock acquisition.
	 */
	__asm__ __volatile__(
	    "ldstub %1, %0\n\t"
	    "membar #LoadLoad"
	    : "=r" (__value), "+m" (*__lockp));
#else
#error	"port me"
#endif
	return (__value);
}
124 
/*
 * Atomically store __value into *__memory and return the previous
 * contents of *__memory.
 */
extern __GNU_INLINE uint32_t
atomic_swap_32(volatile uint32_t *__memory, uint32_t __value)
{
#if defined(__x86)
	/* xchg with memory is implicitly locked; old value lands in %0 */
	__asm__ __volatile__(
	    "xchgl %0, %1"
	    : "+q" (__value), "+m" (*__memory));
	return (__value);
#elif defined(__sparc)
	/*
	 * cas loop: tmp1 holds the expected old value, tmp2 the value
	 * to store.  cas replaces tmp2 with the actual old contents;
	 * if it differs from tmp1 another CPU intervened, so the
	 * annulled delay slot copies it into tmp1 and we retry.  On
	 * success tmp2 == tmp1 == the previous *__memory.
	 */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "mov %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__value)
	    : "cc");
	return (__tmp2);
#else
#error	"port me"
#endif
}
151 
/*
 * Atomic compare-and-swap: if *__memory == __cmp, store __newvalue.
 * Returns the previous contents of *__memory in either case, so the
 * caller can tell success by (return == __cmp).
 */
extern __GNU_INLINE uint32_t
atomic_cas_32(volatile uint32_t *__memory, uint32_t __cmp, uint32_t __newvalue)
{
	uint32_t __oldvalue;
#if defined(__x86)
	/* cmpxchg compares %eax (__cmp) and leaves the old value there */
	__asm__ __volatile__(
	    "lock; cmpxchgl %3, %0"
	    : "=m" (*__memory), "=a" (__oldvalue)
	    : "a" (__cmp), "r" (__newvalue));
#elif defined(__sparc)
	/*
	 * The "1" constraint ties __newvalue to __oldvalue's register:
	 * cas conditionally stores it and always overwrites it with
	 * the old memory contents.
	 */
	__asm__ __volatile__(
	    "cas [%2], %3, %1"
	    : "=m" (*__memory), "=&r" (__oldvalue)
	    : "r" (__memory), "r" (__cmp), "1" (__newvalue));
#else
#error	"port me"
#endif
	return (__oldvalue);
}
171 
/*
 * Atomically increment *__memory by 1.  No value is returned.
 */
extern __GNU_INLINE void
atomic_inc_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; incl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	/*
	 * Standard cas retry loop: load the old value, compute old+1,
	 * cas it in; if another CPU changed the word first, the
	 * annulled delay slot reloads the observed value and retries.
	 */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "add %0, 1, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error	"port me"
#endif
}
196 
/*
 * Atomically decrement *__memory by 1.  No value is returned.
 */
extern __GNU_INLINE void
atomic_dec_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; decl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	/* cas retry loop; see atomic_inc_32 for the pattern */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "sub %0, 1, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error	"port me"
#endif
}
221 
/*
 * Atomically AND __bits into *__memory (i.e. *__memory &= __bits).
 * No value is returned.
 */
extern __GNU_INLINE void
atomic_and_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; andl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	/* cas retry loop; see atomic_inc_32 for the pattern */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "and %0, %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error	"port me"
#endif
}
247 
/*
 * Atomically OR __bits into *__memory (i.e. *__memory |= __bits).
 * No value is returned.
 */
extern __GNU_INLINE void
atomic_or_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; orl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	/* cas retry loop; see atomic_inc_32 for the pattern */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "or %0, %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error	"port me"
#endif
}
273 
274 #if defined(__sparc)	/* only needed on sparc */
275 
/*
 * Return the contents of %i7 — the register holding the caller's
 * return address in the SPARC register-window convention.
 */
extern __GNU_INLINE ulong_t
caller(void)
{
	register ulong_t __value __asm__("i7");
	return (__value);
}
282 
/*
 * Return the current frame pointer (%fp) as a ulong_t.
 */
extern __GNU_INLINE ulong_t
getfp(void)
{
	register ulong_t __value __asm__("fp");
	return (__value);
}
289 
290 #endif	/* __sparc */
291 
292 #if defined(__x86)	/* only needed on x86 */
293 
/*
 * Spin-loop hint for hyperthreaded x86 CPUs.  "rep; nop" encodes the
 * same bytes as the PAUSE instruction but assembles on older
 * assemblers that lack the mnemonic; on CPUs without PAUSE support it
 * degrades harmlessly to a plain NOP.
 */
extern __GNU_INLINE void
ht_pause(void)
{
	__asm__ __volatile__("rep; nop");
}
299 
300 #endif	/* __x86 */
301 
302 #endif	/* !__lint && __GNUC__ */
303 
304 #endif	/* _THR_INLINES_H */
305