xref: /linux/tools/testing/selftests/riscv/vector/validate_v_ptrace.c (revision cee73b1e840c154f64ace682cb477c1ae2e29cc4)
1600f72deSSergey Matyukevich // SPDX-License-Identifier: GPL-2.0-only
2600f72deSSergey Matyukevich #include <sys/ptrace.h>
3600f72deSSergey Matyukevich #include <sys/types.h>
4600f72deSSergey Matyukevich #include <sys/wait.h>
5600f72deSSergey Matyukevich #include <sys/uio.h>
6600f72deSSergey Matyukevich #include <unistd.h>
7600f72deSSergey Matyukevich #include <errno.h>
8600f72deSSergey Matyukevich 
9600f72deSSergey Matyukevich #include <linux/ptrace.h>
10600f72deSSergey Matyukevich #include <linux/elf.h>
11600f72deSSergey Matyukevich 
12600f72deSSergey Matyukevich #include "kselftest_harness.h"
13600f72deSSergey Matyukevich #include "v_helpers.h"
14600f72deSSergey Matyukevich 
/* sstatus.FS field set to Dirty (bits 14:13) — XTheadVector gates vector CSR
 * access on the FP status field, so the child sets FS=Dirty before reading
 * vxsat/vxrm (see ptrace_v_early_debug). TODO(review): confirm against the
 * XTheadVector spec. */
#define SR_FS_DIRTY	0x00006000UL
/* Position of vxrm within a synthesized vcsr value (vxsat is bit 0). */
#define CSR_VXRM_SHIFT	1
1766d03044SSergey Matyukevich 
18600f72deSSergey Matyukevich volatile unsigned long chld_lock;
19600f72deSSergey Matyukevich 
20600f72deSSergey Matyukevich TEST(ptrace_v_not_enabled)
21600f72deSSergey Matyukevich {
22600f72deSSergey Matyukevich 	pid_t pid;
23600f72deSSergey Matyukevich 
24600f72deSSergey Matyukevich 	if (!(is_vector_supported() || is_xtheadvector_supported()))
25600f72deSSergey Matyukevich 		SKIP(return, "Vector not supported");
26600f72deSSergey Matyukevich 
27600f72deSSergey Matyukevich 	chld_lock = 1;
28600f72deSSergey Matyukevich 	pid = fork();
29600f72deSSergey Matyukevich 	ASSERT_LE(0, pid)
30600f72deSSergey Matyukevich 		TH_LOG("fork: %m");
31600f72deSSergey Matyukevich 
32600f72deSSergey Matyukevich 	if (pid == 0) {
33600f72deSSergey Matyukevich 		while (chld_lock == 1)
34600f72deSSergey Matyukevich 			asm volatile("" : : "g"(chld_lock) : "memory");
35600f72deSSergey Matyukevich 
36600f72deSSergey Matyukevich 		asm volatile ("ebreak" : : : );
37600f72deSSergey Matyukevich 	} else {
38600f72deSSergey Matyukevich 		struct __riscv_v_regset_state *regset_data;
39600f72deSSergey Matyukevich 		unsigned long vlenb = get_vr_len();
40600f72deSSergey Matyukevich 		size_t regset_size;
41600f72deSSergey Matyukevich 		struct iovec iov;
42600f72deSSergey Matyukevich 		int status;
43600f72deSSergey Matyukevich 		int ret;
44600f72deSSergey Matyukevich 
45600f72deSSergey Matyukevich 		/* attach */
46600f72deSSergey Matyukevich 
47600f72deSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_ATTACH, pid, NULL, NULL));
48600f72deSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
49600f72deSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
50600f72deSSergey Matyukevich 
51600f72deSSergey Matyukevich 		/* unlock */
52600f72deSSergey Matyukevich 
53600f72deSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_POKEDATA, pid, &chld_lock, 0));
54600f72deSSergey Matyukevich 
55600f72deSSergey Matyukevich 		/* resume and wait for ebreak */
56600f72deSSergey Matyukevich 
57600f72deSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
58600f72deSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
59600f72deSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
60600f72deSSergey Matyukevich 
61600f72deSSergey Matyukevich 		/* try to read vector registers from the tracee */
62600f72deSSergey Matyukevich 
63600f72deSSergey Matyukevich 		regset_size = sizeof(*regset_data) + vlenb * 32;
64600f72deSSergey Matyukevich 		regset_data = calloc(1, regset_size);
65600f72deSSergey Matyukevich 
66600f72deSSergey Matyukevich 		iov.iov_base = regset_data;
67600f72deSSergey Matyukevich 		iov.iov_len = regset_size;
68600f72deSSergey Matyukevich 
69600f72deSSergey Matyukevich 		/* V extension is available, but not yet enabled for the tracee */
70600f72deSSergey Matyukevich 
71600f72deSSergey Matyukevich 		errno = 0;
72600f72deSSergey Matyukevich 		ret = ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov);
73600f72deSSergey Matyukevich 		ASSERT_EQ(ENODATA, errno);
74600f72deSSergey Matyukevich 		ASSERT_EQ(-1, ret);
75600f72deSSergey Matyukevich 
76600f72deSSergey Matyukevich 		/* cleanup */
77600f72deSSergey Matyukevich 
78600f72deSSergey Matyukevich 		ASSERT_EQ(0, kill(pid, SIGKILL));
79600f72deSSergey Matyukevich 	}
80600f72deSSergey Matyukevich }
81600f72deSSergey Matyukevich 
/*
 * Verify that the vector CSR values reported by PTRACE_GETREGSET match
 * what the tracee itself observed via csrr.
 *
 * The child samples its own vstart/vtype/vl/vcsr/vlenb into static
 * volatile globals, then traps with ebreak.  The tracer reads those
 * globals back with PTRACE_PEEKDATA (fork() gives both processes the
 * same addresses) and compares against the NT_RISCV_VECTOR regset.
 */
TEST(ptrace_v_early_debug)
{
	/* Written by the child, read by the tracer via PEEKDATA. */
	static volatile unsigned long vstart;
	static volatile unsigned long vtype;
	static volatile unsigned long vlenb;
	static volatile unsigned long vcsr;
	static volatile unsigned long vl;
	bool xtheadvector;
	pid_t pid;

	if (!(is_vector_supported() || is_xtheadvector_supported()))
		SKIP(return, "Vector not supported");

	/* Cache before fork(): the child must not make syscalls later. */
	xtheadvector = is_xtheadvector_supported();

	chld_lock = 1;
	pid = fork();
	ASSERT_LE(0, pid)
		TH_LOG("fork: %m");

	if (pid == 0) {
		unsigned long vxsat, vxrm;

		vlenb = get_vr_len();

		/* Wait until the tracer clears chld_lock via POKEDATA. */
		while (chld_lock == 1)
			asm volatile ("" : : "g"(chld_lock) : "memory");

		/* Sample the common vector CSRs directly. */
		asm volatile (
			"csrr %[vstart], vstart\n"
			"csrr %[vtype], vtype\n"
			"csrr %[vl], vl\n"
			: [vtype] "=r"(vtype), [vstart] "=r"(vstart), [vl] "=r"(vl)
			:
			: "memory");

		/* no 'is_xtheadvector_supported()' here to avoid clobbering v-state by syscall */
		if (xtheadvector) {
			/*
			 * XTheadVector has no vcsr CSR; set sstatus.FS=Dirty
			 * to allow access, read vxsat/vxrm individually and
			 * synthesize a vcsr-shaped value from them.
			 */
			asm volatile (
				"csrs sstatus, %[bit]\n"
				"csrr %[vxsat], vxsat\n"
				"csrr %[vxrm], vxrm\n"
				: [vxsat] "=r"(vxsat), [vxrm] "=r"(vxrm)
				: [bit] "r" (SR_FS_DIRTY)
				: "memory");
			vcsr = vxsat | vxrm << CSR_VXRM_SHIFT;
		} else {
			asm volatile (
				"csrr %[vcsr], vcsr\n"
				: [vcsr] "=r"(vcsr)
				:
				: "memory");
		}

		/* norvc forces a 4-byte ebreak encoding. */
		asm volatile (
			".option push\n"
			".option norvc\n"
			"ebreak\n"
			".option pop\n");
	} else {
		struct __riscv_v_regset_state *regset_data;
		unsigned long vstart_csr;
		unsigned long vlenb_csr;
		unsigned long vtype_csr;
		unsigned long vcsr_csr;
		unsigned long vl_csr;
		size_t regset_size;
		struct iovec iov;
		int status;

		/* attach */

		ASSERT_EQ(0, ptrace(PTRACE_ATTACH, pid, NULL, NULL));
		ASSERT_EQ(pid, waitpid(pid, &status, 0));
		ASSERT_TRUE(WIFSTOPPED(status));

		/* unlock */

		ASSERT_EQ(0, ptrace(PTRACE_POKEDATA, pid, &chld_lock, 0));

		/* resume and wait for ebreak */

		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
		ASSERT_EQ(pid, waitpid(pid, &status, 0));
		ASSERT_TRUE(WIFSTOPPED(status));

		/* read tracee vector csr regs using ptrace PEEKDATA */

		/*
		 * PEEKDATA returns the word itself, so -1 is ambiguous:
		 * only treat it as failure when errno was also set.
		 */
		errno = 0;
		vstart_csr = ptrace(PTRACE_PEEKDATA, pid, &vstart, NULL);
		ASSERT_FALSE((errno != 0) && (vstart_csr == -1));

		errno = 0;
		vl_csr = ptrace(PTRACE_PEEKDATA, pid, &vl, NULL);
		ASSERT_FALSE((errno != 0) && (vl_csr == -1));

		errno = 0;
		vtype_csr = ptrace(PTRACE_PEEKDATA, pid, &vtype, NULL);
		ASSERT_FALSE((errno != 0) && (vtype_csr == -1));

		errno = 0;
		vcsr_csr = ptrace(PTRACE_PEEKDATA, pid, &vcsr, NULL);
		ASSERT_FALSE((errno != 0) && (vcsr_csr == -1));

		errno = 0;
		vlenb_csr = ptrace(PTRACE_PEEKDATA, pid, &vlenb, NULL);
		ASSERT_FALSE((errno != 0) && (vlenb_csr == -1));

		/* read tracee csr regs using ptrace GETREGSET */

		/* header + 32 vector registers of vlenb bytes each */
		regset_size = sizeof(*regset_data) + vlenb_csr * 32;
		regset_data = calloc(1, regset_size);

		iov.iov_base = regset_data;
		iov.iov_len = regset_size;

		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));

		/* compare: regset must agree with the tracee's own csrr reads */

		EXPECT_EQ(vstart_csr, regset_data->vstart);
		EXPECT_EQ(vtype_csr, regset_data->vtype);
		EXPECT_EQ(vlenb_csr, regset_data->vlenb);
		EXPECT_EQ(vcsr_csr, regset_data->vcsr);
		EXPECT_EQ(vl_csr, regset_data->vl);

		/* cleanup */

		ASSERT_EQ(0, kill(pid, SIGKILL));
	}
}
21366d03044SSergey Matyukevich 
/*
 * Verify that a syscall discards the tracee's live vector configuration.
 *
 * The child programs vtype/vl via vsetvli and traps (1st ebreak): the
 * regset must show the configured state.  The tracer then skips the
 * ebreak so the child reaches sleep(0) — a syscall — and traps again
 * (2nd ebreak): the regset must now show vill set and vl == 0.
 */
TEST(ptrace_v_syscall_clobbering)
{
	pid_t pid;

	if (!is_vector_supported() && !is_xtheadvector_supported())
		SKIP(return, "Vector not supported");

	chld_lock = 1;
	pid = fork();
	ASSERT_LE(0, pid)
		TH_LOG("fork: %m");

	if (pid == 0) {
		unsigned long vl;

		/* Wait until the tracer clears chld_lock via POKEDATA. */
		while (chld_lock == 1)
			asm volatile("" : : "g"(chld_lock) : "memory");

		if (is_xtheadvector_supported()) {
			/* Hand-encoded: toolchains don't emit 0.7.1 vsetvli. */
			asm volatile (
				// 0 | zimm[10:0] | rs1 | 1 1 1 | rd |1010111| vsetvli
				// vsetvli	t4, x0, e16, m2, d1
				".4byte		0b00000000010100000111111011010111\n"
				"mv		%[new_vl], t4\n"
				: [new_vl] "=r" (vl) : : "t4");
		} else {
			asm volatile (
				".option push\n"
				".option arch, +zve32x\n"
				"vsetvli %[new_vl], x0, e16, m2, tu, mu\n"
				".option pop\n"
				: [new_vl] "=r"(vl) : : );
		}

		while (1) {
			/* norvc: keep ebreak 4 bytes so pc += 4 skips it. */
			asm volatile (
				".option push\n"
				".option norvc\n"
				"ebreak\n"
				".option pop\n");

			/* Syscall between ebreaks clobbers the V state. */
			sleep(0);
		}
	} else {
		struct __riscv_v_regset_state *regset_data;
		unsigned long vlenb = get_vr_len();
		struct user_regs_struct regs;
		size_t regset_size;
		struct iovec iov;
		int status;

		/* attach */

		ASSERT_EQ(0, ptrace(PTRACE_ATTACH, pid, NULL, NULL));
		ASSERT_EQ(pid, waitpid(pid, &status, 0));
		ASSERT_TRUE(WIFSTOPPED(status));

		/* unlock */

		ASSERT_EQ(0, ptrace(PTRACE_POKEDATA, pid, &chld_lock, 0));

		/* resume and wait for the 1st ebreak */

		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
		ASSERT_EQ(pid, waitpid(pid, &status, 0));
		ASSERT_TRUE(WIFSTOPPED(status));

		/* read tracee vector csr regs using ptrace GETREGSET */

		/* header + 32 vector registers of vlenb bytes each */
		regset_size = sizeof(*regset_data) + vlenb * 32;
		regset_data = calloc(1, regset_size);

		iov.iov_base = regset_data;
		iov.iov_len = regset_size;

		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));

		/* verify initial vsetvli settings */

		/* 0.7.1 encodes e16/m2 as 0x5; RVV 1.0 as 0x9 (vsew=1, vlmul=1) */
		if (is_xtheadvector_supported())
			EXPECT_EQ(5UL, regset_data->vtype);
		else
			EXPECT_EQ(9UL, regset_data->vtype);

		/* e16/m2 makes VLMAX == VLEN/8 == vlenb */
		EXPECT_EQ(regset_data->vlenb, regset_data->vl);
		EXPECT_EQ(vlenb, regset_data->vlenb);
		EXPECT_EQ(0UL, regset_data->vstart);
		EXPECT_EQ(0UL, regset_data->vcsr);

		/* skip 1st ebreak, then resume and wait for the 2nd ebreak */

		iov.iov_base = &regs;
		iov.iov_len = sizeof(regs);

		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS, &iov));
		regs.pc += 4;	/* step over the uncompressed ebreak */
		ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_PRSTATUS, &iov));

		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
		ASSERT_EQ(pid, waitpid(pid, &status, 0));
		ASSERT_TRUE(WIFSTOPPED(status));

		/* read tracee vtype using ptrace GETREGSET */

		iov.iov_base = regset_data;
		iov.iov_len = regset_size;

		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));

		/* verify that V state is illegal after syscall */

		/* vill is the top bit of vtype; vl reset to 0 */
		EXPECT_EQ((1UL << (__riscv_xlen - 1)), regset_data->vtype);
		EXPECT_EQ(vlenb, regset_data->vlenb);
		EXPECT_EQ(0UL, regset_data->vstart);
		EXPECT_EQ(0UL, regset_data->vcsr);
		EXPECT_EQ(0UL, regset_data->vl);

		/* cleanup */

		ASSERT_EQ(0, kill(pid, SIGKILL));
	}
}
3363789d5eeSSergey Matyukevich 
/* Fixture carries no per-test state; cases are driven by variants below. */
FIXTURE(v_csr_invalid)
{
};

/* Nothing to set up. */
FIXTURE_SETUP(v_csr_invalid)
{
}

/* Nothing to tear down. */
FIXTURE_TEARDOWN(v_csr_invalid)
{
}
34830eb191cSSergey Matyukevich 
/* Bits for FIXTURE_VARIANT .spec: which vector flavor(s) a case applies to. */
#define VECTOR_1_0		BIT(0)	/* standard RVV */
#define XTHEAD_VECTOR_0_7	BIT(1)	/* T-Head vendor vector extension */

/* Does this variant apply to the given flavor? */
#define vector_test(x)		((x) & VECTOR_1_0)
#define xthead_test(x)		((x) & XTHEAD_VECTOR_0_7)
35430eb191cSSergey Matyukevich 
/* modifications of the initial vsetvli settings */
FIXTURE_VARIANT(v_csr_invalid)
{
	unsigned long vstart;		/* value written into regset vstart */
	unsigned long vl;		/* value written into regset vl */
	unsigned long vtype;		/* value written into regset vtype */
	unsigned long vcsr;		/* value written into regset vcsr */
	unsigned long vlenb_mul;	/* multiplier applied to reported vlenb */
	unsigned long vlenb_min;	/* skip test when vlenb < min (0 = no bound) */
	unsigned long vlenb_max;	/* skip test when vlenb > max (0 = no bound) */
	unsigned long spec;		/* VECTOR_1_0 / XTHEAD_VECTOR_0_7 mask */
};
36730eb191cSSergey Matyukevich 
/* unexpected vlenb value: doubling vlenb must be rejected */
FIXTURE_VARIANT_ADD(v_csr_invalid, new_vlenb)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x3,
	.vcsr = 0x0,
	.vlenb_mul = 0x2,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = VECTOR_1_0 | XTHEAD_VECTOR_0_7,
};

/* invalid reserved bits in vcsr (only vxsat/vxrm are defined) */
FIXTURE_VARIANT_ADD(v_csr_invalid, vcsr_invalid_reserved_bits)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x3,
	.vcsr = 0x1UL << 8,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = VECTOR_1_0 | XTHEAD_VECTOR_0_7,
};

/* invalid reserved bits in vtype */
FIXTURE_VARIANT_ADD(v_csr_invalid, vtype_invalid_reserved_bits)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = (0x1UL << 8) | 0x3,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = VECTOR_1_0 | XTHEAD_VECTOR_0_7,
};

/* set vill bit (top bit of vtype) together with a config */
FIXTURE_VARIANT_ADD(v_csr_invalid, invalid_vill_bit)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = (0x1UL << (__riscv_xlen - 1)) | 0x3,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = VECTOR_1_0 | XTHEAD_VECTOR_0_7,
};

/* reserved vsew value: vsew > 3 (vsew lives in vtype[5:3] for RVV) */
FIXTURE_VARIANT_ADD(v_csr_invalid, reserved_vsew)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x4UL << 3,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = VECTOR_1_0,
};

/* XTheadVector: unsupported non-zero VEDIV value (vtype[6:5]) */
FIXTURE_VARIANT_ADD(v_csr_invalid, reserved_vediv)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x3UL << 5,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = XTHEAD_VECTOR_0_7,
};

/* reserved vlmul value: vlmul == 4 (vtype[2:0]) */
FIXTURE_VARIANT_ADD(v_csr_invalid, reserved_vlmul)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x4,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x0,
	.spec = VECTOR_1_0,
};

/* invalid fractional LMUL for VLEN <= 256: LMUL= 1/8, SEW = 64 (0x1d: vsew=3, vlmul=mf8) */
FIXTURE_VARIANT_ADD(v_csr_invalid, frac_lmul1)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x1d,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x20,
	.spec = VECTOR_1_0,
};

/* invalid integral LMUL for VLEN <= 16: LMUL= 2, SEW = 64 (0x19: vsew=3, vlmul=m2) */
FIXTURE_VARIANT_ADD(v_csr_invalid, int_lmul1)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0x19,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x2,
	.spec = VECTOR_1_0,
};

/* XTheadVector: invalid integral LMUL for VLEN <= 16: LMUL= 2, SEW = 64 (0.7.1 encoding) */
FIXTURE_VARIANT_ADD(v_csr_invalid, int_lmul2)
{
	.vstart = 0x0,
	.vl = 0x0,
	.vtype = 0xd,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x2,
	.spec = XTHEAD_VECTOR_0_7,
};

/* invalid VL for VLEN <= 128: LMUL= 2, SEW = 64, VL = 8 (vl exceeds VLMAX) */
FIXTURE_VARIANT_ADD(v_csr_invalid, vl1)
{
	.vstart = 0x0,
	.vl = 0x8,
	.vtype = 0x19,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x10,
	.spec = VECTOR_1_0,
};

/* XTheadVector: invalid VL for VLEN <= 128: LMUL= 2, SEW = 64, VL = 8 */
FIXTURE_VARIANT_ADD(v_csr_invalid, vl2)
{
	.vstart = 0x0,
	.vl = 0x8,
	.vtype = 0xd,
	.vcsr = 0x0,
	.vlenb_mul = 0x1,
	.vlenb_min = 0x0,
	.vlenb_max = 0x10,
	.spec = XTHEAD_VECTOR_0_7,
};
52330eb191cSSergey Matyukevich 
52430eb191cSSergey Matyukevich TEST_F(v_csr_invalid, ptrace_v_invalid_values)
52530eb191cSSergey Matyukevich {
52630eb191cSSergey Matyukevich 	unsigned long vlenb;
52730eb191cSSergey Matyukevich 	pid_t pid;
52830eb191cSSergey Matyukevich 
52930eb191cSSergey Matyukevich 	if (!is_vector_supported() && !is_xtheadvector_supported())
53030eb191cSSergey Matyukevich 		SKIP(return, "Vectors not supported");
53130eb191cSSergey Matyukevich 
53230eb191cSSergey Matyukevich 	if (is_vector_supported() && !vector_test(variant->spec))
53330eb191cSSergey Matyukevich 		SKIP(return, "Test not supported for Vector");
53430eb191cSSergey Matyukevich 
53530eb191cSSergey Matyukevich 	if (is_xtheadvector_supported() && !xthead_test(variant->spec))
53630eb191cSSergey Matyukevich 		SKIP(return, "Test not supported for XTheadVector");
53730eb191cSSergey Matyukevich 
53830eb191cSSergey Matyukevich 	vlenb = get_vr_len();
53930eb191cSSergey Matyukevich 
54030eb191cSSergey Matyukevich 	if (variant->vlenb_min) {
54130eb191cSSergey Matyukevich 		if (vlenb < variant->vlenb_min)
54230eb191cSSergey Matyukevich 			SKIP(return, "This test does not support VLEN < %lu\n",
54330eb191cSSergey Matyukevich 			     variant->vlenb_min * 8);
54430eb191cSSergey Matyukevich 	}
54530eb191cSSergey Matyukevich 
54630eb191cSSergey Matyukevich 	if (variant->vlenb_max) {
54730eb191cSSergey Matyukevich 		if (vlenb > variant->vlenb_max)
54830eb191cSSergey Matyukevich 			SKIP(return, "This test does not support VLEN > %lu\n",
54930eb191cSSergey Matyukevich 			     variant->vlenb_max * 8);
55030eb191cSSergey Matyukevich 	}
55130eb191cSSergey Matyukevich 
55230eb191cSSergey Matyukevich 	chld_lock = 1;
55330eb191cSSergey Matyukevich 	pid = fork();
55430eb191cSSergey Matyukevich 	ASSERT_LE(0, pid)
55530eb191cSSergey Matyukevich 		TH_LOG("fork: %m");
55630eb191cSSergey Matyukevich 
55730eb191cSSergey Matyukevich 	if (pid == 0) {
55830eb191cSSergey Matyukevich 		unsigned long vl;
55930eb191cSSergey Matyukevich 
56030eb191cSSergey Matyukevich 		while (chld_lock == 1)
56130eb191cSSergey Matyukevich 			asm volatile("" : : "g"(chld_lock) : "memory");
56230eb191cSSergey Matyukevich 
56330eb191cSSergey Matyukevich 		if (is_xtheadvector_supported()) {
56430eb191cSSergey Matyukevich 			asm volatile (
56530eb191cSSergey Matyukevich 				// 0 | zimm[10:0] | rs1 | 1 1 1 | rd |1010111| vsetvli
56630eb191cSSergey Matyukevich 				// vsetvli	t4, x0, e16, m2, d1
56730eb191cSSergey Matyukevich 				".4byte		0b00000000010100000111111011010111\n"
56830eb191cSSergey Matyukevich 				"mv		%[new_vl], t4\n"
56930eb191cSSergey Matyukevich 				: [new_vl] "=r" (vl) : : "t4");
57030eb191cSSergey Matyukevich 		} else {
57130eb191cSSergey Matyukevich 			asm volatile (
57230eb191cSSergey Matyukevich 				".option push\n"
57330eb191cSSergey Matyukevich 				".option arch, +zve32x\n"
57430eb191cSSergey Matyukevich 				"vsetvli %[new_vl], x0, e16, m2, tu, mu\n"
57530eb191cSSergey Matyukevich 				".option pop\n"
57630eb191cSSergey Matyukevich 				: [new_vl] "=r"(vl) : : );
57730eb191cSSergey Matyukevich 		}
57830eb191cSSergey Matyukevich 
57930eb191cSSergey Matyukevich 		while (1) {
58030eb191cSSergey Matyukevich 			asm volatile (
58130eb191cSSergey Matyukevich 				".option push\n"
58230eb191cSSergey Matyukevich 				".option norvc\n"
58330eb191cSSergey Matyukevich 				"ebreak\n"
58430eb191cSSergey Matyukevich 				"nop\n"
58530eb191cSSergey Matyukevich 				".option pop\n");
58630eb191cSSergey Matyukevich 		}
58730eb191cSSergey Matyukevich 	} else {
58830eb191cSSergey Matyukevich 		struct __riscv_v_regset_state *regset_data;
58930eb191cSSergey Matyukevich 		size_t regset_size;
59030eb191cSSergey Matyukevich 		struct iovec iov;
59130eb191cSSergey Matyukevich 		int status;
59230eb191cSSergey Matyukevich 		int ret;
59330eb191cSSergey Matyukevich 
59430eb191cSSergey Matyukevich 		/* attach */
59530eb191cSSergey Matyukevich 
59630eb191cSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_ATTACH, pid, NULL, NULL));
59730eb191cSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
59830eb191cSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
59930eb191cSSergey Matyukevich 
60030eb191cSSergey Matyukevich 		/* unlock */
60130eb191cSSergey Matyukevich 
60230eb191cSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_POKEDATA, pid, &chld_lock, 0));
60330eb191cSSergey Matyukevich 
60430eb191cSSergey Matyukevich 		/* resume and wait for the 1st ebreak */
60530eb191cSSergey Matyukevich 
60630eb191cSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
60730eb191cSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
60830eb191cSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
60930eb191cSSergey Matyukevich 
61030eb191cSSergey Matyukevich 		/* read tracee vector csr regs using ptrace GETREGSET */
61130eb191cSSergey Matyukevich 
61230eb191cSSergey Matyukevich 		regset_size = sizeof(*regset_data) + vlenb * 32;
61330eb191cSSergey Matyukevich 		regset_data = calloc(1, regset_size);
61430eb191cSSergey Matyukevich 
61530eb191cSSergey Matyukevich 		iov.iov_base = regset_data;
61630eb191cSSergey Matyukevich 		iov.iov_len = regset_size;
61730eb191cSSergey Matyukevich 
61830eb191cSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
61930eb191cSSergey Matyukevich 
62030eb191cSSergey Matyukevich 		/* verify initial vsetvli settings */
62130eb191cSSergey Matyukevich 
62230eb191cSSergey Matyukevich 		if (is_xtheadvector_supported())
62330eb191cSSergey Matyukevich 			EXPECT_EQ(5UL, regset_data->vtype);
62430eb191cSSergey Matyukevich 		else
62530eb191cSSergey Matyukevich 			EXPECT_EQ(9UL, regset_data->vtype);
62630eb191cSSergey Matyukevich 
62730eb191cSSergey Matyukevich 		EXPECT_EQ(regset_data->vlenb, regset_data->vl);
62830eb191cSSergey Matyukevich 		EXPECT_EQ(vlenb, regset_data->vlenb);
62930eb191cSSergey Matyukevich 		EXPECT_EQ(0UL, regset_data->vstart);
63030eb191cSSergey Matyukevich 		EXPECT_EQ(0UL, regset_data->vcsr);
63130eb191cSSergey Matyukevich 
63230eb191cSSergey Matyukevich 		/* apply invalid settings from fixture variants */
63330eb191cSSergey Matyukevich 
63430eb191cSSergey Matyukevich 		regset_data->vlenb *= variant->vlenb_mul;
63530eb191cSSergey Matyukevich 		regset_data->vstart = variant->vstart;
63630eb191cSSergey Matyukevich 		regset_data->vtype = variant->vtype;
63730eb191cSSergey Matyukevich 		regset_data->vcsr = variant->vcsr;
63830eb191cSSergey Matyukevich 		regset_data->vl = variant->vl;
63930eb191cSSergey Matyukevich 
64030eb191cSSergey Matyukevich 		iov.iov_base = regset_data;
64130eb191cSSergey Matyukevich 		iov.iov_len = regset_size;
64230eb191cSSergey Matyukevich 
64330eb191cSSergey Matyukevich 		errno = 0;
64430eb191cSSergey Matyukevich 		ret = ptrace(PTRACE_SETREGSET, pid, NT_RISCV_VECTOR, &iov);
64530eb191cSSergey Matyukevich 		ASSERT_EQ(errno, EINVAL);
64630eb191cSSergey Matyukevich 		ASSERT_EQ(ret, -1);
64730eb191cSSergey Matyukevich 
64830eb191cSSergey Matyukevich 		/* cleanup */
64930eb191cSSergey Matyukevich 
65030eb191cSSergey Matyukevich 		ASSERT_EQ(0, kill(pid, SIGKILL));
65130eb191cSSergey Matyukevich 	}
65230eb191cSSergey Matyukevich }
65330eb191cSSergey Matyukevich 
/*
 * Fixture for the "valid vector CSR values" ptrace tests. It carries no
 * per-test state: each test forks and ptrace-attaches its own tracee.
 */
FIXTURE(v_csr_valid)
{
};
657*849f05aeSSergey Matyukevich 
/* No setup needed: all state is created inside the test body itself. */
FIXTURE_SETUP(v_csr_valid)
{
}
661*849f05aeSSergey Matyukevich 
/* No teardown needed: the test kills its forked tracee before returning. */
FIXTURE_TEARDOWN(v_csr_valid)
{
}
665*849f05aeSSergey Matyukevich 
/*
 * Modifications of the initial vsetvli settings, applied to the tracee
 * via PTRACE_SETREGSET. All vlenb_* fields are in bytes; the VLEN shown
 * in skip messages is vlenb * 8 bits.
 */
FIXTURE_VARIANT(v_csr_valid)
{
	unsigned long vstart;		/* vstart value written to the tracee */
	unsigned long vl;		/* vl value written to the tracee */
	unsigned long vtype;		/* vtype encoding (layout depends on spec) */
	unsigned long vcsr;		/* vcsr value written to the tracee */
	unsigned long vlenb_mul;	/* multiplier applied to reported vlenb before SETREGSET */
	unsigned long vlenb_min;	/* skip if vlenb < this; 0 means no lower bound */
	unsigned long vlenb_max;	/* skip if vlenb > this; 0 means no upper bound */
	unsigned long spec;		/* vector spec selector checked via vector_test()/xthead_test() */
};
678*849f05aeSSergey Matyukevich 
679*849f05aeSSergey Matyukevich /* valid for VLEN >= 128: LMUL= 1/4, SEW = 32 */
680*849f05aeSSergey Matyukevich FIXTURE_VARIANT_ADD(v_csr_valid, frac_lmul1)
681*849f05aeSSergey Matyukevich {
682*849f05aeSSergey Matyukevich 	.vstart = 0x0,
683*849f05aeSSergey Matyukevich 	.vl = 0x0,
684*849f05aeSSergey Matyukevich 	.vtype = 0x16,
685*849f05aeSSergey Matyukevich 	.vcsr = 0x0,
686*849f05aeSSergey Matyukevich 	.vlenb_mul = 0x1,
687*849f05aeSSergey Matyukevich 	.vlenb_min = 0x10,
688*849f05aeSSergey Matyukevich 	.vlenb_max = 0x0,
689*849f05aeSSergey Matyukevich 	.spec = VECTOR_1_0,
690*849f05aeSSergey Matyukevich };
691*849f05aeSSergey Matyukevich 
692*849f05aeSSergey Matyukevich /* valid for VLEN >= 16: LMUL= 2, SEW = 32 */
693*849f05aeSSergey Matyukevich FIXTURE_VARIANT_ADD(v_csr_valid, int_lmul1)
694*849f05aeSSergey Matyukevich {
695*849f05aeSSergey Matyukevich 	.vstart = 0x0,
696*849f05aeSSergey Matyukevich 	.vl = 0x0,
697*849f05aeSSergey Matyukevich 	.vtype = 0x11,
698*849f05aeSSergey Matyukevich 	.vcsr = 0x0,
699*849f05aeSSergey Matyukevich 	.vlenb_mul = 0x1,
700*849f05aeSSergey Matyukevich 	.vlenb_min = 0x2,
701*849f05aeSSergey Matyukevich 	.vlenb_max = 0x0,
702*849f05aeSSergey Matyukevich 	.spec = VECTOR_1_0,
703*849f05aeSSergey Matyukevich };
704*849f05aeSSergey Matyukevich 
705*849f05aeSSergey Matyukevich /* valid for XTheadVector VLEN >= 16: LMUL= 2, SEW = 32 */
706*849f05aeSSergey Matyukevich FIXTURE_VARIANT_ADD(v_csr_valid, int_lmul2)
707*849f05aeSSergey Matyukevich {
708*849f05aeSSergey Matyukevich 	.vstart = 0x0,
709*849f05aeSSergey Matyukevich 	.vl = 0x0,
710*849f05aeSSergey Matyukevich 	.vtype = 0x9,
711*849f05aeSSergey Matyukevich 	.vcsr = 0x0,
712*849f05aeSSergey Matyukevich 	.vlenb_mul = 0x1,
713*849f05aeSSergey Matyukevich 	.vlenb_min = 0x2,
714*849f05aeSSergey Matyukevich 	.vlenb_max = 0x0,
715*849f05aeSSergey Matyukevich 	.spec = XTHEAD_VECTOR_0_7,
716*849f05aeSSergey Matyukevich };
717*849f05aeSSergey Matyukevich 
718*849f05aeSSergey Matyukevich /* valid for VLEN >= 32: LMUL= 2, SEW = 32, VL = 2 */
719*849f05aeSSergey Matyukevich FIXTURE_VARIANT_ADD(v_csr_valid, int_lmul3)
720*849f05aeSSergey Matyukevich {
721*849f05aeSSergey Matyukevich 	.vstart = 0x0,
722*849f05aeSSergey Matyukevich 	.vl = 0x2,
723*849f05aeSSergey Matyukevich 	.vtype = 0x11,
724*849f05aeSSergey Matyukevich 	.vcsr = 0x0,
725*849f05aeSSergey Matyukevich 	.vlenb_mul = 0x1,
726*849f05aeSSergey Matyukevich 	.vlenb_min = 0x4,
727*849f05aeSSergey Matyukevich 	.vlenb_max = 0x0,
728*849f05aeSSergey Matyukevich 	.spec = VECTOR_1_0,
729*849f05aeSSergey Matyukevich };
730*849f05aeSSergey Matyukevich 
731*849f05aeSSergey Matyukevich TEST_F(v_csr_valid, ptrace_v_valid_values)
732*849f05aeSSergey Matyukevich {
733*849f05aeSSergey Matyukevich 	unsigned long vlenb;
734*849f05aeSSergey Matyukevich 	pid_t pid;
735*849f05aeSSergey Matyukevich 
736*849f05aeSSergey Matyukevich 	if (!is_vector_supported() && !is_xtheadvector_supported())
737*849f05aeSSergey Matyukevich 		SKIP(return, "Vectors not supported");
738*849f05aeSSergey Matyukevich 
739*849f05aeSSergey Matyukevich 	if (is_vector_supported() && !vector_test(variant->spec))
740*849f05aeSSergey Matyukevich 		SKIP(return, "Test not supported for Vector");
741*849f05aeSSergey Matyukevich 
742*849f05aeSSergey Matyukevich 	if (is_xtheadvector_supported() && !xthead_test(variant->spec))
743*849f05aeSSergey Matyukevich 		SKIP(return, "Test not supported for XTheadVector");
744*849f05aeSSergey Matyukevich 
745*849f05aeSSergey Matyukevich 	vlenb = get_vr_len();
746*849f05aeSSergey Matyukevich 
747*849f05aeSSergey Matyukevich 	if (variant->vlenb_min) {
748*849f05aeSSergey Matyukevich 		if (vlenb < variant->vlenb_min)
749*849f05aeSSergey Matyukevich 			SKIP(return, "This test does not support VLEN < %lu\n",
750*849f05aeSSergey Matyukevich 			     variant->vlenb_min * 8);
751*849f05aeSSergey Matyukevich 	}
752*849f05aeSSergey Matyukevich 	if (variant->vlenb_max) {
753*849f05aeSSergey Matyukevich 		if (vlenb > variant->vlenb_max)
754*849f05aeSSergey Matyukevich 			SKIP(return, "This test does not support VLEN > %lu\n",
755*849f05aeSSergey Matyukevich 			     variant->vlenb_max * 8);
756*849f05aeSSergey Matyukevich 	}
757*849f05aeSSergey Matyukevich 
758*849f05aeSSergey Matyukevich 	chld_lock = 1;
759*849f05aeSSergey Matyukevich 	pid = fork();
760*849f05aeSSergey Matyukevich 	ASSERT_LE(0, pid)
761*849f05aeSSergey Matyukevich 		TH_LOG("fork: %m");
762*849f05aeSSergey Matyukevich 
763*849f05aeSSergey Matyukevich 	if (pid == 0) {
764*849f05aeSSergey Matyukevich 		unsigned long vl;
765*849f05aeSSergey Matyukevich 
766*849f05aeSSergey Matyukevich 		while (chld_lock == 1)
767*849f05aeSSergey Matyukevich 			asm volatile("" : : "g"(chld_lock) : "memory");
768*849f05aeSSergey Matyukevich 
769*849f05aeSSergey Matyukevich 		if (is_xtheadvector_supported()) {
770*849f05aeSSergey Matyukevich 			asm volatile (
771*849f05aeSSergey Matyukevich 				// 0 | zimm[10:0] | rs1 | 1 1 1 | rd |1010111| vsetvli
772*849f05aeSSergey Matyukevich 				// vsetvli	t4, x0, e16, m2, d1
773*849f05aeSSergey Matyukevich 				".4byte		0b00000000010100000111111011010111\n"
774*849f05aeSSergey Matyukevich 				"mv		%[new_vl], t4\n"
775*849f05aeSSergey Matyukevich 				: [new_vl] "=r" (vl) : : "t4");
776*849f05aeSSergey Matyukevich 		} else {
777*849f05aeSSergey Matyukevich 			asm volatile (
778*849f05aeSSergey Matyukevich 				".option push\n"
779*849f05aeSSergey Matyukevich 				".option arch, +zve32x\n"
780*849f05aeSSergey Matyukevich 				"vsetvli %[new_vl], x0, e16, m2, tu, mu\n"
781*849f05aeSSergey Matyukevich 				".option pop\n"
782*849f05aeSSergey Matyukevich 				: [new_vl] "=r"(vl) : : );
783*849f05aeSSergey Matyukevich 		}
784*849f05aeSSergey Matyukevich 
785*849f05aeSSergey Matyukevich 		asm volatile (
786*849f05aeSSergey Matyukevich 			".option push\n"
787*849f05aeSSergey Matyukevich 			".option norvc\n"
788*849f05aeSSergey Matyukevich 			".option arch, +zve32x\n"
789*849f05aeSSergey Matyukevich 			"ebreak\n" /* breakpoint 1: apply new V state using ptrace */
790*849f05aeSSergey Matyukevich 			"nop\n"
791*849f05aeSSergey Matyukevich 			"ebreak\n" /* breakpoint 2: V state clean - context will not be saved */
792*849f05aeSSergey Matyukevich 			"vmv.v.i v0, -1\n"
793*849f05aeSSergey Matyukevich 			"ebreak\n" /* breakpoint 3: V state dirty - context will be saved */
794*849f05aeSSergey Matyukevich 			".option pop\n");
795*849f05aeSSergey Matyukevich 	} else {
796*849f05aeSSergey Matyukevich 		struct __riscv_v_regset_state *regset_data;
797*849f05aeSSergey Matyukevich 		struct user_regs_struct regs;
798*849f05aeSSergey Matyukevich 		size_t regset_size;
799*849f05aeSSergey Matyukevich 		struct iovec iov;
800*849f05aeSSergey Matyukevich 		int status;
801*849f05aeSSergey Matyukevich 
802*849f05aeSSergey Matyukevich 		/* attach */
803*849f05aeSSergey Matyukevich 
804*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_ATTACH, pid, NULL, NULL));
805*849f05aeSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
806*849f05aeSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
807*849f05aeSSergey Matyukevich 
808*849f05aeSSergey Matyukevich 		/* unlock */
809*849f05aeSSergey Matyukevich 
810*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_POKEDATA, pid, &chld_lock, 0));
811*849f05aeSSergey Matyukevich 
812*849f05aeSSergey Matyukevich 		/* resume and wait for the 1st ebreak */
813*849f05aeSSergey Matyukevich 
814*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
815*849f05aeSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
816*849f05aeSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
817*849f05aeSSergey Matyukevich 
818*849f05aeSSergey Matyukevich 		/* read tracee vector csr regs using ptrace GETREGSET */
819*849f05aeSSergey Matyukevich 
820*849f05aeSSergey Matyukevich 		regset_size = sizeof(*regset_data) + vlenb * 32;
821*849f05aeSSergey Matyukevich 		regset_data = calloc(1, regset_size);
822*849f05aeSSergey Matyukevich 
823*849f05aeSSergey Matyukevich 		iov.iov_base = regset_data;
824*849f05aeSSergey Matyukevich 		iov.iov_len = regset_size;
825*849f05aeSSergey Matyukevich 
826*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
827*849f05aeSSergey Matyukevich 
828*849f05aeSSergey Matyukevich 		/* verify initial vsetvli settings */
829*849f05aeSSergey Matyukevich 
830*849f05aeSSergey Matyukevich 		if (is_xtheadvector_supported())
831*849f05aeSSergey Matyukevich 			EXPECT_EQ(5UL, regset_data->vtype);
832*849f05aeSSergey Matyukevich 		else
833*849f05aeSSergey Matyukevich 			EXPECT_EQ(9UL, regset_data->vtype);
834*849f05aeSSergey Matyukevich 
835*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vlenb, regset_data->vl);
836*849f05aeSSergey Matyukevich 		EXPECT_EQ(vlenb, regset_data->vlenb);
837*849f05aeSSergey Matyukevich 		EXPECT_EQ(0UL, regset_data->vstart);
838*849f05aeSSergey Matyukevich 		EXPECT_EQ(0UL, regset_data->vcsr);
839*849f05aeSSergey Matyukevich 
840*849f05aeSSergey Matyukevich 		/* apply valid settings from fixture variants */
841*849f05aeSSergey Matyukevich 
842*849f05aeSSergey Matyukevich 		regset_data->vlenb *= variant->vlenb_mul;
843*849f05aeSSergey Matyukevich 		regset_data->vstart = variant->vstart;
844*849f05aeSSergey Matyukevich 		regset_data->vtype = variant->vtype;
845*849f05aeSSergey Matyukevich 		regset_data->vcsr = variant->vcsr;
846*849f05aeSSergey Matyukevich 		regset_data->vl = variant->vl;
847*849f05aeSSergey Matyukevich 
848*849f05aeSSergey Matyukevich 		iov.iov_base = regset_data;
849*849f05aeSSergey Matyukevich 		iov.iov_len = regset_size;
850*849f05aeSSergey Matyukevich 
851*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_RISCV_VECTOR, &iov));
852*849f05aeSSergey Matyukevich 
853*849f05aeSSergey Matyukevich 		/* skip 1st ebreak, then resume and wait for the 2nd ebreak */
854*849f05aeSSergey Matyukevich 
855*849f05aeSSergey Matyukevich 		iov.iov_base = &regs;
856*849f05aeSSergey Matyukevich 		iov.iov_len = sizeof(regs);
857*849f05aeSSergey Matyukevich 
858*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS, &iov));
859*849f05aeSSergey Matyukevich 		regs.pc += 4;
860*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_PRSTATUS, &iov));
861*849f05aeSSergey Matyukevich 
862*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
863*849f05aeSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
864*849f05aeSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
865*849f05aeSSergey Matyukevich 
866*849f05aeSSergey Matyukevich 		/* read tracee vector csr regs using ptrace GETREGSET */
867*849f05aeSSergey Matyukevich 
868*849f05aeSSergey Matyukevich 		iov.iov_base = regset_data;
869*849f05aeSSergey Matyukevich 		iov.iov_len = regset_size;
870*849f05aeSSergey Matyukevich 
871*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
872*849f05aeSSergey Matyukevich 
873*849f05aeSSergey Matyukevich 		/* verify vector csr regs from tracee context */
874*849f05aeSSergey Matyukevich 
875*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vstart, variant->vstart);
876*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vtype, variant->vtype);
877*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vcsr, variant->vcsr);
878*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vl, variant->vl);
879*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vlenb, vlenb);
880*849f05aeSSergey Matyukevich 
881*849f05aeSSergey Matyukevich 		/* skip 2nd ebreak, then resume and wait for the 3rd ebreak */
882*849f05aeSSergey Matyukevich 
883*849f05aeSSergey Matyukevich 		iov.iov_base = &regs;
884*849f05aeSSergey Matyukevich 		iov.iov_len = sizeof(regs);
885*849f05aeSSergey Matyukevich 
886*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS, &iov));
887*849f05aeSSergey Matyukevich 		regs.pc += 4;
888*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_PRSTATUS, &iov));
889*849f05aeSSergey Matyukevich 
890*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
891*849f05aeSSergey Matyukevich 		ASSERT_EQ(pid, waitpid(pid, &status, 0));
892*849f05aeSSergey Matyukevich 		ASSERT_TRUE(WIFSTOPPED(status));
893*849f05aeSSergey Matyukevich 
894*849f05aeSSergey Matyukevich 		/* read tracee vector csr regs using ptrace GETREGSET */
895*849f05aeSSergey Matyukevich 
896*849f05aeSSergey Matyukevich 		iov.iov_base = regset_data;
897*849f05aeSSergey Matyukevich 		iov.iov_len = regset_size;
898*849f05aeSSergey Matyukevich 
899*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
900*849f05aeSSergey Matyukevich 
901*849f05aeSSergey Matyukevich 		/* verify vector csr regs from tracee context */
902*849f05aeSSergey Matyukevich 
903*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vstart, variant->vstart);
904*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vtype, variant->vtype);
905*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vcsr, variant->vcsr);
906*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vl, variant->vl);
907*849f05aeSSergey Matyukevich 		EXPECT_EQ(regset_data->vlenb, vlenb);
908*849f05aeSSergey Matyukevich 
909*849f05aeSSergey Matyukevich 		/* cleanup */
910*849f05aeSSergey Matyukevich 
911*849f05aeSSergey Matyukevich 		ASSERT_EQ(0, kill(pid, SIGKILL));
912*849f05aeSSergey Matyukevich 	}
913*849f05aeSSergey Matyukevich }
914*849f05aeSSergey Matyukevich 
915600f72deSSergey Matyukevich TEST_HARNESS_MAIN
916