1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22 /*
23 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
25 */
26
27 #ifndef _THR_INLINES_H
28 #define _THR_INLINES_H
29
30 #include <sys/ccompile.h>
31
32 #if !defined(__lint) && defined(__GNUC__)
33
34 /* inlines for gcc */
35
36 /*
37 * ON-usable GCC 4.x emits register pseudo-ops declaring %g7 as ignored, rather
38 * than scratch, GCC 3 does the reverse. All uses, both ones it generated
39 * (_curthread) and ones it didn't (__curthread) must agree.
40 */
41 #if __GNUC__ > 3
42 #define SPARC_REG_SPEC "#ignore"
43 #else
44 #define SPARC_REG_SPEC "#scratch"
45 #endif
46
/*
 * Return the current thread's ulwp_t pointer with no function-call
 * overhead.  On amd64/i386 it is loaded from offset 0 of the thread
 * segment register (%fs / %gs respectively); on sparc the pointer is
 * simply the contents of %g7.
 */
extern __GNU_INLINE ulwp_t *
_curthread(void)
{
#if defined(__amd64)
	ulwp_t *__value;
	__asm__ __volatile__("movq %%fs:0, %0" : "=r" (__value));
#elif defined(__i386)
	ulwp_t *__value;
	__asm__ __volatile__("movl %%gs:0, %0" : "=r" (__value));
#elif defined(__sparc)
	/* the thread pointer lives in global register %g7 */
	register ulwp_t *__value __asm__("g7");
#else
#error "port me"
#endif
	return (__value);
}
63
/*
 * Like _curthread(), but on sparc this dereferences a slot at offset
 * 80 from %g7 instead of returning %g7 itself (and emits a .register
 * pseudo-op so the assembler's view of %g7 matches the compiler's --
 * see SPARC_REG_SPEC above).  On x86 it is the same %fs:0 / %gs:0
 * load as _curthread().
 * NOTE(review): the offset-80 slot is presumably the ulwp_t
 * self-pointer field, NULL until libc initializes the thread pointer
 * -- confirm against the ulwp_t layout in the private libc headers.
 */
extern __GNU_INLINE ulwp_t *
__curthread(void)
{
	ulwp_t *__value;
	__asm__ __volatile__(
#if defined(__amd64)
	    "movq %%fs:0, %0\n\t"
#elif defined(__i386)
	    "movl %%gs:0, %0\n\t"
#elif defined(__sparcv9)
	    ".register %%g7, " SPARC_REG_SPEC "\n\t"
	    "ldx [%%g7 + 80], %0\n\t"
#elif defined(__sparc)
	    ".register %%g7, " SPARC_REG_SPEC "\n\t"
	    "ld [%%g7 + 80], %0\n\t"
#else
#error "port me"
#endif
	    : "=r" (__value));
	return (__value);
}
85
/*
 * Return the caller's current stack pointer, read directly from the
 * architecture's stack-pointer register (%rsp / %esp / %sp).
 */
extern __GNU_INLINE greg_t
stkptr(void)
{
#if defined(__amd64)
	register greg_t __value __asm__("rsp");
#elif defined(__i386)
	register greg_t __value __asm__("esp");
#elif defined(__sparc)
	register greg_t __value __asm__("sp");
#else
#error "port me"
#endif
	return (__value);
}
100
/*
 * High-resolution timer read via a fast trap into the kernel, avoiding
 * the normal system-call path.  The clobber lists reflect that the
 * trap destroys the caller-saved registers (per the note below).
 * NOTE(review): on x86 this is "int $0xd2" with function code 3 in
 * %eax, and on sparc "ta 0x24" -- presumably the fast-trap vectors for
 * gethrtime; confirm against the platform trap tables (sys/trap.h).
 */
extern __GNU_INLINE hrtime_t
gethrtime(void)		/* note: caller-saved registers are trashed */
{
#if defined(__amd64)
	hrtime_t __value;
	__asm__ __volatile__(
	    "movl $3, %%eax\n\t"
	    "int $0xd2"
	    : "=a" (__value)
	    : : "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11", "cc");
#elif defined(__i386)
	hrtime_t __value;
	__asm__ __volatile__(
	    "movl $3, %%eax\n\t"
	    "int $0xd2"
	    : "=A" (__value)	/* 64-bit result in the %edx:%eax pair */
	    : : "ecx", "cc");
#elif defined(__sparcv9)
	register hrtime_t __value __asm__("o0");
	__asm__ __volatile__(
	    "ta 0x24\n\t"
	    "sllx %%o0, 32, %0\n\t"	/* combine the 32-bit halves the */
	    "or %%o1, %0, %0"		/* trap returns in %o0 and %o1 */
	    : "=r" (__value)
	    : : "o1", "o2", "o3", "o4", "o5", "cc");
#elif defined(__sparc)
	/* 64-bit hrtime_t occupies the %o0:%o1 register pair on sparc32 */
	register hrtime_t __value __asm__("o0");
	__asm__ __volatile__(
	    "ta 0x24"
	    : "=r" (__value)
	    : : "o2", "o3", "o4", "o5", "cc");
#else
#error "port me"
#endif
	return (__value);
}
137
/*
 * Atomically set the lock byte *__lockp non-zero and return its
 * previous contents.  A zero return means the caller acquired the
 * lock; non-zero means it was already held.
 */
extern __GNU_INLINE int
set_lock_byte(volatile uint8_t *__lockp)
{
	int __value;
#if defined(__x86)
	__asm__ __volatile__(
	    "movl $1, %0\n\t"		/* %edx = 1 (zeroes upper bytes) */
	    "xchgb %%dl, %1"		/* atomically swap %dl with the lock */
	    : "+d" (__value), "+m" (*__lockp));
#elif defined(__sparc)
	__asm__ __volatile__(
	    "ldstub %1, %0\n\t"		/* atomic fetch-old / store-0xff */
	    "membar #LoadLoad"		/* order subsequent loads (acquire) */
	    : "=r" (__value), "+m" (*__lockp));
#else
#error "port me"
#endif
	return (__value);
}
157
/*
 * Atomically store __value into *__memory and return the previous
 * contents.  x86 uses xchgl (implicitly locked); sparc emulates the
 * swap with a compare-and-swap retry loop.
 */
extern __GNU_INLINE uint32_t
atomic_swap_32(volatile uint32_t *__memory, uint32_t __value)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "xchgl %0, %1"
	    : "+q" (__value), "+m" (*__memory));
	return (__value);
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"		/* tmp1 = expected old value */
	    "1:\n\t"
	    "mov %4, %1\n\t"		/* tmp2 = new value */
	    "cas [%3], %0, %1\n\t"	/* tmp2 = actual old value */
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"	/* raced: retry with ... */
	    "  mov %1, %0"		/* ... tmp1 = actual old value */
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__value)
	    : "cc");
	return (__tmp2);		/* the previous contents */
#else
#error "port me"
#endif
}
184
/*
 * Atomic compare-and-swap: if *__memory == __cmp, store __newvalue.
 * Either way, return the value *__memory held before the operation
 * (equal to __cmp exactly when the swap happened).
 */
extern __GNU_INLINE uint32_t
atomic_cas_32(volatile uint32_t *__memory, uint32_t __cmp, uint32_t __newvalue)
{
	uint32_t __oldvalue;
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; cmpxchgl %3, %0"
	    : "=m" (*__memory), "=a" (__oldvalue)
	    : "a" (__cmp), "r" (__newvalue));
#elif defined(__sparc)
	/* "1" ties __newvalue's input register to operand 1 (__oldvalue) */
	__asm__ __volatile__(
	    "cas [%2], %3, %1"
	    : "=m" (*__memory), "=&r" (__oldvalue)
	    : "r" (__memory), "r" (__cmp), "1" (__newvalue));
#else
#error "port me"
#endif
	return (__oldvalue);
}
204
/*
 * Atomically increment *__memory by one.  x86 uses a locked incl;
 * sparc uses a compare-and-swap retry loop.
 */
extern __GNU_INLINE void
atomic_inc_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; incl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"		/* tmp1 = expected old value */
	    "1:\n\t"
	    "add %0, 1, %1\n\t"		/* tmp2 = old + 1 */
	    "cas [%3], %0, %1\n\t"	/* tmp2 = actual old value */
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"	/* raced: retry with ... */
	    "  mov %1, %0"		/* ... tmp1 = actual old value */
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error "port me"
#endif
}
229
/*
 * Atomically decrement *__memory by one.  x86 uses a locked decl;
 * sparc uses a compare-and-swap retry loop.
 */
extern __GNU_INLINE void
atomic_dec_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; decl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"		/* tmp1 = expected old value */
	    "1:\n\t"
	    "sub %0, 1, %1\n\t"		/* tmp2 = old - 1 */
	    "cas [%3], %0, %1\n\t"	/* tmp2 = actual old value */
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"	/* raced: retry with ... */
	    "  mov %1, %0"		/* ... tmp1 = actual old value */
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error "port me"
#endif
}
254
/*
 * Atomically AND __bits into *__memory (i.e. *__memory &= __bits).
 * x86 uses a locked andl; sparc uses a compare-and-swap retry loop.
 */
extern __GNU_INLINE void
atomic_and_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; andl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"		/* tmp1 = expected old value */
	    "1:\n\t"
	    "and %0, %4, %1\n\t"	/* tmp2 = old & bits */
	    "cas [%3], %0, %1\n\t"	/* tmp2 = actual old value */
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"	/* raced: retry with ... */
	    "  mov %1, %0"		/* ... tmp1 = actual old value */
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error "port me"
#endif
}
280
/*
 * Atomically OR __bits into *__memory (i.e. *__memory |= __bits).
 * x86 uses a locked orl; sparc uses a compare-and-swap retry loop.
 */
extern __GNU_INLINE void
atomic_or_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; orl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"		/* tmp1 = expected old value */
	    "1:\n\t"
	    "or %0, %4, %1\n\t"		/* tmp2 = old | bits */
	    "cas [%3], %0, %1\n\t"	/* tmp2 = actual old value */
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"	/* raced: retry with ... */
	    "  mov %1, %0"		/* ... tmp1 = actual old value */
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error "port me"
#endif
}
306
307 #if defined(__sparc) /* only needed on sparc */
308
/*
 * Return the contents of %i7 -- the register holding the caller's
 * return address in the SPARC calling convention.
 */
extern __GNU_INLINE ulong_t
caller(void)
{
	register ulong_t __value __asm__("i7");
	return (__value);
}
315
/*
 * Return the current frame pointer (SPARC register %fp).
 */
extern __GNU_INLINE ulong_t
getfp(void)
{
	register ulong_t __value __asm__("fp");
	return (__value);
}
322
323 #endif /* __sparc */
324
325 #if defined(__x86) /* only needed on x86 */
326
/*
 * Spin-loop hint for hyper-threaded CPUs.  "rep; nop" is the byte
 * encoding of the x86 "pause" instruction (F3 90), spelled this way
 * so pre-SSE2 assemblers accept it; older CPUs execute it as a nop.
 */
extern __GNU_INLINE void
ht_pause(void)
{
	__asm__ __volatile__("rep; nop");
}
332
333 #endif /* __x86 */
334
335 #endif /* !__lint && __GNUC__ */
336
337 #endif /* _THR_INLINES_H */
338