/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __KVM_FPU_H_
#define __KVM_FPU_H_

#include <asm/fpu/api.h>

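/*
 * 128-bit vector type used to shuttle SSE (XMM) register contents.  The
 * accessor macros below pick such a vector apart: sse128_lo() and
 * sse128_hi() return the low and high 64 bits, sse128_l0()..sse128_l3()
 * return the four 32-bit lanes, and sse128() assembles a vector from two
 * 64-bit halves.
 */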
typedef u32 __attribute__((vector_size(16))) sse128_t;
#define __sse128_u union { sse128_t vec; u64 as_u64[2]; u32 as_u32[4]; }
#define sse128_lo(x) ({ __sse128_u t; t.vec = x; t.as_u64[0]; })
#define sse128_hi(x) ({ __sse128_u t; t.vec = x; t.as_u64[1]; })
#define sse128_l0(x) ({ __sse128_u t; t.vec = x; t.as_u32[0]; })
#define sse128_l1(x) ({ __sse128_u t; t.vec = x; t.as_u32[1]; })
#define sse128_l2(x) ({ __sse128_u t; t.vec = x; t.as_u32[2]; })
#define sse128_l3(x) ({ __sse128_u t; t.vec = x; t.as_u32[3]; })
#define sse128(lo, hi) ({ __sse128_u t; t.as_u64[0] = lo; t.as_u64[1] = hi; t.vec; })

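/*
 * 256-bit vector type used to shuttle AVX (YMM) register contents.
 */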
typedef u32 __attribute__((vector_size(32))) avx256_t;

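/*
 * Raw register accessors: copy a single YMM/XMM/MM register to or from
 * memory via vmovdqa/movdqa/movq.  An out-of-range register number hits
 * BUG().  Callers must make sure the guest's FPU state is resident on the
 * CPU, e.g. by bracketing the access with kvm_fpu_get()/kvm_fpu_put() the
 * way the kvm_*_reg() wrappers at the bottom of this header do.
 */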
static inline void _kvm_read_avx_reg(int reg, avx256_t *data)
{
	switch (reg) {
	case 0: asm("vmovdqa %%ymm0, %0" : "=m"(*data)); break;
	case 1: asm("vmovdqa %%ymm1, %0" : "=m"(*data)); break;
	case 2: asm("vmovdqa %%ymm2, %0" : "=m"(*data)); break;
	case 3: asm("vmovdqa %%ymm3, %0" : "=m"(*data)); break;
	case 4: asm("vmovdqa %%ymm4, %0" : "=m"(*data)); break;
	case 5: asm("vmovdqa %%ymm5, %0" : "=m"(*data)); break;
	case 6: asm("vmovdqa %%ymm6, %0" : "=m"(*data)); break;
	case 7: asm("vmovdqa %%ymm7, %0" : "=m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("vmovdqa %%ymm8, %0" : "=m"(*data)); break;
	case 9: asm("vmovdqa %%ymm9, %0" : "=m"(*data)); break;
	case 10: asm("vmovdqa %%ymm10, %0" : "=m"(*data)); break;
	case 11: asm("vmovdqa %%ymm11, %0" : "=m"(*data)); break;
	case 12: asm("vmovdqa %%ymm12, %0" : "=m"(*data)); break;
	case 13: asm("vmovdqa %%ymm13, %0" : "=m"(*data)); break;
	case 14: asm("vmovdqa %%ymm14, %0" : "=m"(*data)); break;
	case 15: asm("vmovdqa %%ymm15, %0" : "=m"(*data)); break;
#endif
	default: BUG();
	}
}

static inline void _kvm_write_avx_reg(int reg, const avx256_t *data)
{
	switch (reg) {
	case 0: asm("vmovdqa %0, %%ymm0" : : "m"(*data)); break;
	case 1: asm("vmovdqa %0, %%ymm1" : : "m"(*data)); break;
	case 2: asm("vmovdqa %0, %%ymm2" : : "m"(*data)); break;
	case 3: asm("vmovdqa %0, %%ymm3" : : "m"(*data)); break;
	case 4: asm("vmovdqa %0, %%ymm4" : : "m"(*data)); break;
	case 5: asm("vmovdqa %0, %%ymm5" : : "m"(*data)); break;
	case 6: asm("vmovdqa %0, %%ymm6" : : "m"(*data)); break;
	case 7: asm("vmovdqa %0, %%ymm7" : : "m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("vmovdqa %0, %%ymm8" : : "m"(*data)); break;
	case 9: asm("vmovdqa %0, %%ymm9" : : "m"(*data)); break;
	case 10: asm("vmovdqa %0, %%ymm10" : : "m"(*data)); break;
	case 11: asm("vmovdqa %0, %%ymm11" : : "m"(*data)); break;
	case 12: asm("vmovdqa %0, %%ymm12" : : "m"(*data)); break;
	case 13: asm("vmovdqa %0, %%ymm13" : : "m"(*data)); break;
	case 14: asm("vmovdqa %0, %%ymm14" : : "m"(*data)); break;
	case 15: asm("vmovdqa %0, %%ymm15" : : "m"(*data)); break;
#endif
	default: BUG();
	}
}

static inline void _kvm_read_sse_reg(int reg, sse128_t *data)
{
	switch (reg) {
	case 0: asm("movdqa %%xmm0, %0" : "=m"(*data)); break;
	case 1: asm("movdqa %%xmm1, %0" : "=m"(*data)); break;
	case 2: asm("movdqa %%xmm2, %0" : "=m"(*data)); break;
	case 3: asm("movdqa %%xmm3, %0" : "=m"(*data)); break;
	case 4: asm("movdqa %%xmm4, %0" : "=m"(*data)); break;
	case 5: asm("movdqa %%xmm5, %0" : "=m"(*data)); break;
	case 6: asm("movdqa %%xmm6, %0" : "=m"(*data)); break;
	case 7: asm("movdqa %%xmm7, %0" : "=m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqa %%xmm8, %0" : "=m"(*data)); break;
	case 9: asm("movdqa %%xmm9, %0" : "=m"(*data)); break;
	case 10: asm("movdqa %%xmm10, %0" : "=m"(*data)); break;
	case 11: asm("movdqa %%xmm11, %0" : "=m"(*data)); break;
	case 12: asm("movdqa %%xmm12, %0" : "=m"(*data)); break;
	case 13: asm("movdqa %%xmm13, %0" : "=m"(*data)); break;
	case 14: asm("movdqa %%xmm14, %0" : "=m"(*data)); break;
	case 15: asm("movdqa %%xmm15, %0" : "=m"(*data)); break;
#endif
	default: BUG();
	}
}

static inline void _kvm_write_sse_reg(int reg, const sse128_t *data)
{
	switch (reg) {
	case 0: asm("movdqa %0, %%xmm0" : : "m"(*data)); break;
	case 1: asm("movdqa %0, %%xmm1" : : "m"(*data)); break;
	case 2: asm("movdqa %0, %%xmm2" : : "m"(*data)); break;
	case 3: asm("movdqa %0, %%xmm3" : : "m"(*data)); break;
	case 4: asm("movdqa %0, %%xmm4" : : "m"(*data)); break;
	case 5: asm("movdqa %0, %%xmm5" : : "m"(*data)); break;
	case 6: asm("movdqa %0, %%xmm6" : : "m"(*data)); break;
	case 7: asm("movdqa %0, %%xmm7" : : "m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqa %0, %%xmm8" : : "m"(*data)); break;
	case 9: asm("movdqa %0, %%xmm9" : : "m"(*data)); break;
	case 10: asm("movdqa %0, %%xmm10" : : "m"(*data)); break;
	case 11: asm("movdqa %0, %%xmm11" : : "m"(*data)); break;
	case 12: asm("movdqa %0, %%xmm12" : : "m"(*data)); break;
	case 13: asm("movdqa %0, %%xmm13" : : "m"(*data)); break;
	case 14: asm("movdqa %0, %%xmm14" : : "m"(*data)); break;
	case 15: asm("movdqa %0, %%xmm15" : : "m"(*data)); break;
#endif
	default: BUG();
	}
}

static inline void _kvm_read_mmx_reg(int reg, u64 *data)
{
	switch (reg) {
	case 0: asm("movq %%mm0, %0" : "=m"(*data)); break;
	case 1: asm("movq %%mm1, %0" : "=m"(*data)); break;
	case 2: asm("movq %%mm2, %0" : "=m"(*data)); break;
	case 3: asm("movq %%mm3, %0" : "=m"(*data)); break;
	case 4: asm("movq %%mm4, %0" : "=m"(*data)); break;
	case 5: asm("movq %%mm5, %0" : "=m"(*data)); break;
	case 6: asm("movq %%mm6, %0" : "=m"(*data)); break;
	case 7: asm("movq %%mm7, %0" : "=m"(*data)); break;
	default: BUG();
	}
}

static inline void _kvm_write_mmx_reg(int reg, const u64 *data)
{
	switch (reg) {
	case 0: asm("movq %0, %%mm0" : : "m"(*data)); break;
	case 1: asm("movq %0, %%mm1" : : "m"(*data)); break;
	case 2: asm("movq %0, %%mm2" : : "m"(*data)); break;
	case 3: asm("movq %0, %%mm3" : : "m"(*data)); break;
	case 4: asm("movq %0, %%mm4" : : "m"(*data)); break;
	case 5: asm("movq %0, %%mm5" : : "m"(*data)); break;
	case 6: asm("movq %0, %%mm6" : : "m"(*data)); break;
	case 7: asm("movq %0, %%mm7" : : "m"(*data)); break;
	default: BUG();
	}
}

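/*
 * kvm_fpu_get() makes the current task's FPU register state live on the
 * CPU: it takes fpregs_lock() (disabling preemption) and, if the registers
 * were not loaded yet (TIF_NEED_FPU_LOAD), restores them with
 * switch_fpu_return().  kvm_fpu_put() drops the lock again.  FPU registers
 * may only be touched between the two calls.
 */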
static inline void kvm_fpu_get(void)
{
	fpregs_lock();

	fpregs_assert_state_consistent();
	if (test_thread_flag(TIF_NEED_FPU_LOAD))
		switch_fpu_return();
}

static inline void kvm_fpu_put(void)
{
	fpregs_unlock();
}

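/*
 * Locked accessors: each wrapper loads the FPU state, performs one register
 * read or write, and releases the FPU again.
 */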
static inline void kvm_read_avx_reg(int reg, avx256_t *data)
{
	kvm_fpu_get();
	_kvm_read_avx_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_write_avx_reg(int reg, const avx256_t *data)
{
	kvm_fpu_get();
	_kvm_write_avx_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_read_sse_reg(int reg, sse128_t *data)
{
	kvm_fpu_get();
	_kvm_read_sse_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_write_sse_reg(int reg, const sse128_t *data)
{
	kvm_fpu_get();
	_kvm_write_sse_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_read_mmx_reg(int reg, u64 *data)
{
	kvm_fpu_get();
	_kvm_read_mmx_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_write_mmx_reg(int reg, const u64 *data)
{
	kvm_fpu_get();
	_kvm_write_mmx_reg(reg, data);
	kvm_fpu_put();
}
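/*
 * Illustrative usage (hypothetical caller, not part of this header): an
 * emulator path that needs the current value of guest XMM0 could do
 *
 *	sse128_t val;
 *
 *	kvm_read_sse_reg(0, &val);
 *	u64 lo = sse128_lo(val);	// low 64 bits of XMM0
 *	u64 hi = sse128_hi(val);	// high 64 bits of XMM0
 */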

#endif /* __KVM_FPU_H_ */