/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef _ASM_CPU_H
#define	_ASM_CPU_H

#ifdef	__cplusplus
extern "C" {
#endif

#if !defined(__lint) && defined(__GNUC__)

#if defined(__i386) || defined(__amd64)

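/*
 * ht_pause() issues the PAUSE instruction, which hints to the CPU that
 * the caller is in a spin-wait loop; this reduces power consumption and
 * avoids memory-order-violation stalls on hyper-threaded processors.
 */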
extern __inline__ void
ht_pause(void)
{
	__asm__ __volatile__(
	    "pause");
}

/*
 * prefetch 64 bytes
 *
 * prefetch is an SSE extension which is not supported on older
 * 32-bit processors, so these routines are defined as no-ops on
 * 32-bit builds for now
 */

extern __inline__ void
prefetch_read_many(void *addr)
{
#if defined(__amd64)
	__asm__(
	    "prefetcht0 (%0);"
	    "prefetcht0 32(%0);"
	    : /* no output */
	    : "r" (addr));
#endif	/* __amd64 */
}

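/*
 * prefetchnta fetches the data with minimal cache pollution, which is
 * appropriate for data that will be read once and not reused soon.
 */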
extern __inline__ void
prefetch_read_once(void *addr)
{
#if defined(__amd64)
	__asm__(
	    "prefetchnta (%0);"
	    "prefetchnta 32(%0);"
	    : /* no output */
	    : "r" (addr));
#endif	/* __amd64 */
}

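/*
 * The write variants currently use prefetcht0, the same hint as
 * prefetch_read_many; no prefetch-for-write instruction is issued here.
 */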
extern __inline__ void
prefetch_write_many(void *addr)
{
#if defined(__amd64)
	__asm__(
	    "prefetcht0 (%0);"
	    "prefetcht0 32(%0);"
	    : /* no output */
	    : "r" (addr));
#endif	/* __amd64 */
}

extern __inline__ void
prefetch_write_once(void *addr)
{
#if defined(__amd64)
	__asm__(
	    "prefetcht0 (%0);"
	    "prefetcht0 32(%0);"
	    : /* no output */
	    : "r" (addr));
#endif	/* __amd64 */
}

#if !defined(__xpv)

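/*
 * cli() clears the interrupt flag, disabling maskable interrupts on the
 * current CPU.  The "memory" clobber keeps the compiler from moving
 * memory accesses across the critical-section boundary.
 */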
extern __inline__ void
cli(void)
{
	__asm__ __volatile__(
	    "cli" : : : "memory");
}

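/*
 * sti() sets the interrupt flag, re-enabling maskable interrupts.
 */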
extern __inline__ void
sti(void)
{
	__asm__ __volatile__(
	    "sti");
}

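/*
 * i86_halt() enables interrupts and halts the CPU until the next
 * interrupt arrives.  Because sti delays interrupt delivery by one
 * instruction, the hlt is always reached before any pending interrupt
 * is taken, so there is no lost-wakeup window between the two.
 */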
extern __inline__ void
i86_halt(void)
{
	__asm__ __volatile__(
	    "sti; hlt");
}

#endif /* !__xpv */

#endif	/* __i386 || __amd64 */

#if defined(__amd64)

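/*
 * __set_ds(), __set_es(), __set_fs() and __set_gs() load a 16-bit
 * segment selector into the corresponding segment register.
 */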
extern __inline__ void
__set_ds(selector_t value)
{
	__asm__ __volatile__(
	    "movw	%0, %%ds"
	    : /* no output */
	    : "r" (value));
}

extern __inline__ void
__set_es(selector_t value)
{
	__asm__ __volatile__(
	    "movw	%0, %%es"
	    : /* no output */
	    : "r" (value));
}

extern __inline__ void
__set_fs(selector_t value)
{
	__asm__ __volatile__(
	    "movw	%0, %%fs"
	    : /* no output */
	    : "r" (value));
}

extern __inline__ void
__set_gs(selector_t value)
{
	__asm__ __volatile__(
	    "movw	%0, %%gs"
	    : /* no output */
	    : "r" (value));
}

#if !defined(__xpv)

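/*
 * __swapgs() exchanges the current GS base with the kernel GS base MSR
 * (IA32_KERNEL_GS_BASE), as is done when crossing the user/kernel
 * boundary.  The preceding mfence orders prior loads and stores before
 * the switch.
 */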
extern __inline__ void
__swapgs(void)
{
	__asm__ __volatile__(
	    "mfence; swapgs");
}

#endif /* !__xpv */

#endif	/* __amd64 */

#endif	/* !__lint && __GNUC__ */

#ifdef	__cplusplus
}
#endif

#endif	/* _ASM_CPU_H */