xref: /linux/tools/testing/selftests/kvm/x86/fastops_test.c (revision 1f2bbbbbda57f1939d757f5021ec0e3ea782ccf6)
// SPDX-License-Identifier: GPL-2.0-only
#include "test_util.h"
#include "kvm_util.h"
#include "processor.h"

/*
 * Execute a fastop() instruction, with or without forced emulation.  BT bit 0
 * of the input to set RFLAGS.CF based on whether the input is even or odd, so
 * that instructions like ADC and SBB are deterministic.
 */
#define fastop(__insn)									\
	"bt $0, %[bt_val]\n\t"								\
	__insn "\n\t"									\
	"pushfq\n\t"									\
	"pop %[flags]\n\t"
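
/*
 * For reference, a hand expansion (not compiler output) of the asm template
 * produced by fastop("add %[input], %[output]") looks roughly like:
 *
 *	"bt $0, %[bt_val]\n\t"
 *	"add %[input], %[output]\n\t"
 *	"pushfq\n\t"
 *	"pop %[flags]\n\t"
 *
 * i.e. seed RFLAGS.CF from bit 0 of the BT operand, execute the instruction
 * under test, then capture the resulting RFLAGS via the stack.
 */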

#define flags_constraint(flags_val) [flags]"=r"(flags_val)
#define bt_constraint(__bt_val) [bt_val]"rm"((uint32_t)__bt_val)

#define guest_execute_fastop_1(FEP, insn, __val, __flags)				\
({											\
	__asm__ __volatile__(fastop(FEP insn " %[val]")					\
			     : [val]"+r"(__val), flags_constraint(__flags)		\
			     : bt_constraint(__val)					\
			     : "cc", "memory");						\
})

#define guest_test_fastop_1(insn, type_t, __val)					\
({											\
	type_t val = __val, ex_val = __val, input = __val;				\
	uint64_t flags, ex_flags;							\
											\
	guest_execute_fastop_1("", insn, ex_val, ex_flags);				\
	guest_execute_fastop_1(KVM_FEP, insn, val, flags);				\
											\
	__GUEST_ASSERT(val == ex_val,							\
		       "Wanted 0x%lx for '%s 0x%lx', got 0x%lx",			\
		       (uint64_t)ex_val, insn, (uint64_t)input, (uint64_t)val);		\
	__GUEST_ASSERT(flags == ex_flags,						\
			"Wanted flags 0x%lx for '%s 0x%lx', got 0x%lx",			\
			ex_flags, insn, (uint64_t)input, flags);			\
})
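
/*
 * Example usage (this mirrors the loops at the bottom of the file):
 * guest_test_fastop_1("incq", uint64_t, 7) runs "incq" once natively and once
 * with the forced emulation prefix (KVM_FEP), then asserts that the result
 * and the captured RFLAGS are identical for both runs.
 */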

#define guest_execute_fastop_2(FEP, insn, __input, __output, __flags)			\
({											\
	__asm__ __volatile__(fastop(FEP insn " %[input], %[output]")			\
			     : [output]"+r"(__output), flags_constraint(__flags)	\
			     : [input]"r"(__input), bt_constraint(__output)		\
			     : "cc", "memory");						\
})

#define guest_test_fastop_2(insn, type_t, __val1, __val2)				\
({											\
	type_t input = __val1, input2 = __val2, output = __val2, ex_output = __val2;	\
	uint64_t flags, ex_flags;							\
											\
	guest_execute_fastop_2("", insn, input, ex_output, ex_flags);			\
	guest_execute_fastop_2(KVM_FEP, insn, input, output, flags);			\
											\
	__GUEST_ASSERT(output == ex_output,						\
		       "Wanted 0x%lx for '%s 0x%lx 0x%lx', got 0x%lx",			\
		       (uint64_t)ex_output, insn, (uint64_t)input,			\
		       (uint64_t)input2, (uint64_t)output);				\
	__GUEST_ASSERT(flags == ex_flags,						\
			"Wanted flags 0x%lx for '%s 0x%lx, 0x%lx', got 0x%lx",		\
			ex_flags, insn, (uint64_t)input, (uint64_t)input2, flags);	\
})
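
/*
 * For the two-operand variant, __val1 is the source operand and __val2 seeds
 * both the destination and the BT operand, i.e. RFLAGS.CF going into the
 * instruction is bit 0 of the destination.  E.g. guest_test_fastop_2("addq",
 * uint64_t, 1, 2) adds 1 into a destination that starts at 2, with CF seeded
 * to 0, and compares the native and emulated results and flags.
 */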

#define guest_execute_fastop_cl(FEP, insn, __shift, __output, __flags)			\
({											\
	__asm__ __volatile__(fastop(FEP insn " %%cl, %[output]")			\
			     : [output]"+r"(__output), flags_constraint(__flags)	\
			     : "c"(__shift), bt_constraint(__output)			\
			     : "cc", "memory");						\
})

#define guest_test_fastop_cl(insn, type_t, __val1, __val2)				\
({											\
	type_t output = __val2, ex_output = __val2, input = __val2;			\
	uint8_t shift = __val1;								\
	uint64_t flags, ex_flags;							\
											\
	guest_execute_fastop_cl("", insn, shift, ex_output, ex_flags);			\
	guest_execute_fastop_cl(KVM_FEP, insn, shift, output, flags);			\
											\
	__GUEST_ASSERT(output == ex_output,						\
		       "Wanted 0x%lx for '%s 0x%x, 0x%lx', got 0x%lx",			\
		       (uint64_t)ex_output, insn, shift, (uint64_t)input,		\
		       (uint64_t)output);						\
	__GUEST_ASSERT(flags == ex_flags,						\
			"Wanted flags 0x%lx for '%s 0x%x, 0x%lx', got 0x%lx",		\
			ex_flags, insn, shift, (uint64_t)input, flags);			\
})
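
/*
 * The CL variant covers shifts and rotates, which take their count in CL;
 * the count is bound via the "c" constraint and only the low 8 bits of
 * __val1 are used.  E.g. guest_test_fastop_cl("rolq", uint64_t, 7,
 * 0xaaaaaaaaaaaaaaaa) rotates the alternating bit pattern left by CL = 7,
 * natively and via forced emulation, and compares the results and RFLAGS.
 */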

#define guest_execute_fastop_div(__KVM_ASM_SAFE, insn, __a, __d, __rm, __flags)		\
({											\
	uint64_t ign_error_code;							\
	uint8_t vector;									\
											\
	__asm__ __volatile__(fastop(__KVM_ASM_SAFE(insn " %[denom]"))			\
			     : "+a"(__a), "+d"(__d), flags_constraint(__flags),		\
			       KVM_ASM_SAFE_OUTPUTS(vector, ign_error_code)		\
			     : [denom]"rm"(__rm), bt_constraint(__rm)			\
			     : "cc", "memory", KVM_ASM_SAFE_CLOBBERS);			\
	vector;										\
})
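
/*
 * DIV can fault, e.g. #DE on a zero divisor or on a quotient that doesn't fit
 * in the destination, so the instruction is wrapped with KVM_ASM_SAFE() or
 * KVM_ASM_SAFE_FEP() and the vector of any exception is reported via
 * KVM_ASM_SAFE_OUTPUTS().  The statement expression evaluates to that vector
 * so that native and emulated behavior can be compared even when the division
 * faults.
 */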

#define guest_test_fastop_div(insn, type_t, __val1, __val2)				\
({											\
	type_t _a = __val1, _d = __val1, rm = __val2;					\
	type_t a = _a, d = _d, ex_a = _a, ex_d = _d;					\
	uint64_t flags, ex_flags;							\
	uint8_t v, ex_v;								\
											\
	ex_v = guest_execute_fastop_div(KVM_ASM_SAFE, insn, ex_a, ex_d, rm, ex_flags);	\
	v = guest_execute_fastop_div(KVM_ASM_SAFE_FEP, insn, a, d, rm, flags);		\
											\
	__GUEST_ASSERT(v == ex_v,							\
		       "Wanted vector 0x%x for '%s 0x%lx:0x%lx/0x%lx', got 0x%x",	\
		       ex_v, insn, (uint64_t)_a, (uint64_t)_d, (uint64_t)rm, v);	\
	__GUEST_ASSERT(a == ex_a && d == ex_d,						\
		       "Wanted 0x%lx:0x%lx for '%s 0x%lx:0x%lx/0x%lx', got 0x%lx:0x%lx",\
		       (uint64_t)ex_a, (uint64_t)ex_d, insn, (uint64_t)_a,		\
		       (uint64_t)_d, (uint64_t)rm, (uint64_t)a, (uint64_t)d);		\
	__GUEST_ASSERT(v || ex_v || (flags == ex_flags),				\
			"Wanted flags 0x%lx for '%s 0x%lx:0x%lx/0x%lx', got 0x%lx",	\
			ex_flags, insn, (uint64_t)_a, (uint64_t)_d, (uint64_t)rm, flags);\
})
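
/*
 * Example: guest_test_fastop_div("divq", uint64_t, 1, 0) divides a dividend
 * with RAX = RDX = 1 by a zero divisor, which raises #DE in both the native
 * and the emulated run.  Note that the final assert above only compares
 * RFLAGS when neither run faulted; the vector and the register state are
 * always compared.
 */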

static const uint64_t vals[] = {
	0,
	1,
	2,
	4,
	7,
	0x5555555555555555,
	0xaaaaaaaaaaaaaaaa,
	0xfefefefefefefefe,
	0xffffffffffffffff,
};
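
/*
 * The test values cover zero, small even and odd inputs (which also exercise
 * both polarities of the BT-seeded CF), alternating bit patterns, 0xfe
 * repeated, and all-ones; each value is truncated to the operand size when it
 * is assigned to type_t in the guest_test_fastop_*() macros.
 */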

#define guest_test_fastops(type_t, suffix)						\
do {											\
	int i, j;									\
											\
	for (i = 0; i < ARRAY_SIZE(vals); i++) {					\
		guest_test_fastop_1("dec" suffix, type_t, vals[i]);			\
		guest_test_fastop_1("inc" suffix, type_t, vals[i]);			\
		guest_test_fastop_1("neg" suffix, type_t, vals[i]);			\
		guest_test_fastop_1("not" suffix, type_t, vals[i]);			\
											\
		for (j = 0; j < ARRAY_SIZE(vals); j++) {				\
			guest_test_fastop_2("add" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("adc" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("and" suffix, type_t, vals[i], vals[j]);	\
			if (sizeof(type_t) != 1) {					\
				guest_test_fastop_2("bsf" suffix, type_t, vals[i], vals[j]);	\
				guest_test_fastop_2("bsr" suffix, type_t, vals[i], vals[j]);	\
				guest_test_fastop_2("bt" suffix, type_t, vals[i], vals[j]);	\
				guest_test_fastop_2("btc" suffix, type_t, vals[i], vals[j]);	\
				guest_test_fastop_2("btr" suffix, type_t, vals[i], vals[j]);	\
				guest_test_fastop_2("bts" suffix, type_t, vals[i], vals[j]);	\
				guest_test_fastop_2("imul" suffix, type_t, vals[i], vals[j]);	\
			}								\
			guest_test_fastop_2("cmp" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("or" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("sbb" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("sub" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("test" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_2("xor" suffix, type_t, vals[i], vals[j]);	\
											\
			guest_test_fastop_cl("rol" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_cl("ror" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_cl("rcl" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_cl("rcr" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_cl("sar" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_cl("shl" suffix, type_t, vals[i], vals[j]);	\
			guest_test_fastop_cl("shr" suffix, type_t, vals[i], vals[j]);	\
											\
			guest_test_fastop_div("div" suffix, type_t, vals[i], vals[j]);	\
		}									\
	}										\
} while (0)
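
/*
 * BSF, BSR, BT, BTC, BTR, BTS, and the two-operand form of IMUL have no
 * byte-sized encodings, hence the sizeof(type_t) != 1 guard above.  E.g.
 * guest_test_fastops(uint8_t, "b") generates only "addb", "rolb", "divb",
 * etc., while guest_test_fastops(uint64_t, "q") also covers "bsfq" and
 * friends.
 */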

static void guest_code(void)
{
	guest_test_fastops(uint8_t, "b");
	guest_test_fastops(uint16_t, "w");
	guest_test_fastops(uint32_t, "l");
	guest_test_fastops(uint64_t, "q");

	GUEST_DONE();
}

int main(int argc, char *argv[])
{
	struct kvm_vcpu *vcpu;
	struct kvm_vm *vm;

	TEST_REQUIRE(is_forced_emulation_enabled);

	vm = vm_create_with_one_vcpu(&vcpu, guest_code);

	vcpu_run(vcpu);
	TEST_ASSERT_EQ(get_ucall(vcpu, NULL), UCALL_DONE);

	kvm_vm_free(vm);
}