/* xref: /freebsd/sys/riscv/vmm/vmm_switch.S (revision d3916eace506b8ab23537223f5c92924636a1c41) */
/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2024 Ruslan Bukin <br@bsdpad.com>
 *
 * This software was developed by the University of Cambridge Computer
 * Laboratory (Department of Computer Science and Technology) under Innovate
 * UK project 105694, "Digital Security by Design (DSbD) Technology Platform
 * Prototype".
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <machine/asm.h>
#include "assym.inc"

36	.text
37
38/*
39 * a0 == hypctx *
40 */
41ENTRY(vmm_switch)
42	sd	ra, (HYP_H_RA)(a0)
43	sd	sp, (HYP_H_SP)(a0)
44	sd	tp, (HYP_H_TP)(a0)
45	sd	gp, (HYP_H_GP)(a0)
46	sd	s0, (HYP_H_S + 0 * 8)(a0)
47	sd	s1, (HYP_H_S + 1 * 8)(a0)
48	sd	s2, (HYP_H_S + 2 * 8)(a0)
49	sd	s3, (HYP_H_S + 3 * 8)(a0)
50	sd	s4, (HYP_H_S + 4 * 8)(a0)
51	sd	s5, (HYP_H_S + 5 * 8)(a0)
52	sd	s6, (HYP_H_S + 6 * 8)(a0)
53	sd	s7, (HYP_H_S + 7 * 8)(a0)
54	sd	s8, (HYP_H_S + 8 * 8)(a0)
55	sd	s9, (HYP_H_S + 9 * 8)(a0)
56	sd	s10, (HYP_H_S + 10 * 8)(a0)
57	sd	s11, (HYP_H_S + 11 * 8)(a0)
58
59	sd	a1, (HYP_H_A + 1 * 8)(a0)
60	sd	a2, (HYP_H_A + 2 * 8)(a0)
61	sd	a3, (HYP_H_A + 3 * 8)(a0)
62	sd	a4, (HYP_H_A + 4 * 8)(a0)
63	sd	a5, (HYP_H_A + 5 * 8)(a0)
64	sd	a6, (HYP_H_A + 6 * 8)(a0)
65	sd	a7, (HYP_H_A + 7 * 8)(a0)
66
67	ld	t0, (HYP_G_SSTATUS)(a0)
68	ld	t1, (HYP_G_HSTATUS)(a0)
69	ld	t2, (HYP_G_SCOUNTEREN)(a0)
70	la	t4, .Lswitch_return
71	ld	t5, (HYP_G_SEPC)(a0)
72
73	csrrw	t0, sstatus, t0
74	csrrw	t1, hstatus, t1
75	csrrw	t2, scounteren, t2
76	csrrw	t3, sscratch, a0
77	csrrw	t4, stvec, t4
78	csrw	sepc, t5
79
80	sd	t0, (HYP_H_SSTATUS)(a0)
81	sd	t1, (HYP_H_HSTATUS)(a0)
82	sd	t2, (HYP_H_SCOUNTEREN)(a0)
83	sd	t3, (HYP_H_SSCRATCH)(a0)
84	sd	t4, (HYP_H_STVEC)(a0)
85
86	ld	ra, (HYP_G_RA)(a0)
87	ld	sp, (HYP_G_SP)(a0)
88	ld	gp, (HYP_G_GP)(a0)
89	ld	tp, (HYP_G_TP)(a0)
90	ld	t0, (HYP_G_T + 0 * 8)(a0)
91	ld	t1, (HYP_G_T + 1 * 8)(a0)
92	ld	t2, (HYP_G_T + 2 * 8)(a0)
93	ld	t3, (HYP_G_T + 3 * 8)(a0)
94	ld	t4, (HYP_G_T + 4 * 8)(a0)
95	ld	t5, (HYP_G_T + 5 * 8)(a0)
96	ld	t6, (HYP_G_T + 6 * 8)(a0)
97	ld	s0, (HYP_G_S + 0 * 8)(a0)
98	ld	s1, (HYP_G_S + 1 * 8)(a0)
99	ld	s2, (HYP_G_S + 2 * 8)(a0)
100	ld	s3, (HYP_G_S + 3 * 8)(a0)
101	ld	s4, (HYP_G_S + 4 * 8)(a0)
102	ld	s5, (HYP_G_S + 5 * 8)(a0)
103	ld	s6, (HYP_G_S + 6 * 8)(a0)
104	ld	s7, (HYP_G_S + 7 * 8)(a0)
105	ld	s8, (HYP_G_S + 8 * 8)(a0)
106	ld	s9, (HYP_G_S + 9 * 8)(a0)
107	ld	s10, (HYP_G_S + 10 * 8)(a0)
108	ld	s11, (HYP_G_S + 11 * 8)(a0)
109	/* skip a0 for now. */
110	ld	a1, (HYP_G_A + 1 * 8)(a0)
111	ld	a2, (HYP_G_A + 2 * 8)(a0)
112	ld	a3, (HYP_G_A + 3 * 8)(a0)
113	ld	a4, (HYP_G_A + 4 * 8)(a0)
114	ld	a5, (HYP_G_A + 5 * 8)(a0)
115	ld	a6, (HYP_G_A + 6 * 8)(a0)
116	ld	a7, (HYP_G_A + 7 * 8)(a0)
117	/* now load a0. */
118	ld	a0, (HYP_G_A + 0 * 8)(a0)
119
120	sret
121
122	.align 2
123.Lswitch_return:
124
125	csrrw	a0, sscratch, a0
126	sd	ra, (HYP_G_RA)(a0)
127	sd	sp, (HYP_G_SP)(a0)
128	sd	gp, (HYP_G_GP)(a0)
129	sd	tp, (HYP_G_TP)(a0)
130	sd	t0, (HYP_G_T + 0 * 8)(a0)
131	sd	t1, (HYP_G_T + 1 * 8)(a0)
132	sd	t2, (HYP_G_T + 2 * 8)(a0)
133	sd	t3, (HYP_G_T + 3 * 8)(a0)
134	sd	t4, (HYP_G_T + 4 * 8)(a0)
135	sd	t5, (HYP_G_T + 5 * 8)(a0)
136	sd	t6, (HYP_G_T + 6 * 8)(a0)
137	sd	s0, (HYP_G_S + 0 * 8)(a0)
138	sd	s1, (HYP_G_S + 1 * 8)(a0)
139	sd	s2, (HYP_G_S + 2 * 8)(a0)
140	sd	s3, (HYP_G_S + 3 * 8)(a0)
141	sd	s4, (HYP_G_S + 4 * 8)(a0)
142	sd	s5, (HYP_G_S + 5 * 8)(a0)
143	sd	s6, (HYP_G_S + 6 * 8)(a0)
144	sd	s7, (HYP_G_S + 7 * 8)(a0)
145	sd	s8, (HYP_G_S + 8 * 8)(a0)
146	sd	s9, (HYP_G_S + 9 * 8)(a0)
147	sd	s10, (HYP_G_S + 10 * 8)(a0)
148	sd	s11, (HYP_G_S + 11 * 8)(a0)
149	/* skip a0 */
150	sd	a1, (HYP_G_A + 1 * 8)(a0)
151	sd	a2, (HYP_G_A + 2 * 8)(a0)
152	sd	a3, (HYP_G_A + 3 * 8)(a0)
153	sd	a4, (HYP_G_A + 4 * 8)(a0)
154	sd	a5, (HYP_G_A + 5 * 8)(a0)
155	sd	a6, (HYP_G_A + 6 * 8)(a0)
156	sd	a7, (HYP_G_A + 7 * 8)(a0)
157
158	ld	t1, (HYP_H_STVEC)(a0)
159	ld	t2, (HYP_H_SSCRATCH)(a0)
160	ld	t3, (HYP_H_SCOUNTEREN)(a0)
161	ld	t4, (HYP_H_HSTATUS)(a0)
162	ld	t5, (HYP_H_SSTATUS)(a0)
163
164	csrr	t0, sepc
165	csrw	stvec, t1
166	csrrw	t2, sscratch, t2
167	csrrw	t3, scounteren, t3
168	csrrw	t4, hstatus, t4
169	csrrw	t5, sstatus, t5
170
171	sd	t0, (HYP_G_SEPC)(a0)
172	sd	t2, (HYP_G_A + 0 * 8)(a0)
173	sd	t3, (HYP_G_SCOUNTEREN)(a0)
174	sd	t4, (HYP_G_HSTATUS)(a0)
175	sd	t5, (HYP_G_SSTATUS)(a0)
176
177	ld	ra, (HYP_H_RA)(a0)
178	ld	sp, (HYP_H_SP)(a0)
179	ld	tp, (HYP_H_TP)(a0)
180	ld	gp, (HYP_H_GP)(a0)
181	ld	s0, (HYP_H_S + 0 * 8)(a0)
182	ld	s1, (HYP_H_S + 1 * 8)(a0)
183	ld	s2, (HYP_H_S + 2 * 8)(a0)
184	ld	s3, (HYP_H_S + 3 * 8)(a0)
185	ld	s4, (HYP_H_S + 4 * 8)(a0)
186	ld	s5, (HYP_H_S + 5 * 8)(a0)
187	ld	s6, (HYP_H_S + 6 * 8)(a0)
188	ld	s7, (HYP_H_S + 7 * 8)(a0)
189	ld	s8, (HYP_H_S + 8 * 8)(a0)
190	ld	s9, (HYP_H_S + 9 * 8)(a0)
191	ld	s10, (HYP_H_S + 10 * 8)(a0)
192	ld	s11, (HYP_H_S + 11 * 8)(a0)
193
194	ld	a1, (HYP_H_A + 1 * 8)(a0)
195	ld	a2, (HYP_H_A + 2 * 8)(a0)
196	ld	a3, (HYP_H_A + 3 * 8)(a0)
197	ld	a4, (HYP_H_A + 4 * 8)(a0)
198	ld	a5, (HYP_H_A + 5 * 8)(a0)
199	ld	a6, (HYP_H_A + 6 * 8)(a0)
200	ld	a7, (HYP_H_A + 7 * 8)(a0)
201
202	ret
203
204END(vmm_switch)
205
/*
 * void vmm_unpriv_trap(struct hyptrap *trap)
 *
 * Trap handler installed while the host performs an unprivileged
 * guest-memory access (e.g. hlvx.hu).  Records the trap CSRs into
 * *trap, advances sepc past the faulting 4-byte instruction so the
 * host resumes after it, and sret's back to the host.
 *
 * In:      a0 == struct hyptrap *
 * Clobber: a1
 */
ENTRY(vmm_unpriv_trap)
	csrr	a1, sepc
	sd	a1, HYP_TRAP_SEPC(a0)
	addi	a1, a1, 4	/* Next instruction after hlvx.hu */
	csrw	sepc, a1
	csrr	a1, scause
	sd	a1, HYP_TRAP_SCAUSE(a0)
	csrr	a1, stval
	sd	a1, HYP_TRAP_STVAL(a0)
	csrr	a1, htval
	sd	a1, HYP_TRAP_HTVAL(a0)
	csrr	a1, htinst
	sd	a1, HYP_TRAP_HTINST(a0)
	sret
END(vmm_unpriv_trap)
