@ xref: /linux/arch/arm/kernel/hyp-stub.S (revision a36e9f5cfe9eb3a1dce8769c7058251c42705357)
1/* SPDX-License-Identifier: GPL-2.0-or-later */
2/*
3 * Copyright (c) 2012 Linaro Limited.
4 */
5
6#include <linux/init.h>
7#include <linux/irqchip/arm-gic-v3.h>
8#include <linux/linkage.h>
9#include <asm/assembler.h>
10#include <asm/virt.h>
11
.arch armv7-a

#ifndef ZIMAGE
/*
 * For the kernel proper, we need to find out the CPU boot mode long after
 * boot, so we need to store it in a writable variable.
 *
 * This is not in .bss, because we set it sufficiently early that the boot-time
 * zeroing of .bss would clobber it.
 */
.data
	.align	2
ENTRY(__boot_cpu_mode)
	.long	0
.text

	/*
	 * Save the primary CPU boot mode. Requires 2 scratch registers.
	 */
	.macro	store_primary_cpu_mode	reg1, reg2
	mrs	\reg1, cpsr
	and	\reg1, \reg1, #MODE_MASK	@ isolate the CPSR mode field
	str_l	\reg1, __boot_cpu_mode, \reg2	@ \reg2 = scratch for the address
	.endm

	/*
	 * Compare the current mode with the one saved on the primary CPU.
	 * If they don't match, record that fact. The Z bit indicates
	 * if there's a match or not.
	 * Requires 2 additional scratch registers.
	 */
	.macro	compare_cpu_mode_with_primary mode, reg1, reg2
	adr_l	\reg2, __boot_cpu_mode
	ldr	\reg1, [\reg2]
	cmp	\mode, \reg1		@ matches primary CPU boot mode?
	orrne	\reg1, \reg1, #BOOT_CPU_MODE_MISMATCH
	strne	\reg1, [\reg2]		@ record what happened and give up
	.endm

#else	/* ZIMAGE */

	/* zImage build: nothing to record, the variable does not exist. */
	.macro	store_primary_cpu_mode	reg1:req, reg2:req
	.endm

/*
 * The zImage loader only runs on one CPU, so we don't bother with mult-CPU
 * consistency checking:
 */
	.macro	compare_cpu_mode_with_primary mode, reg1, reg2
	cmp	\mode, \mode		@ always sets Z: pretend the modes match
	.endm

#endif /* ZIMAGE */
65
/*
 * Hypervisor stub installation functions.
 *
 * These must be called with the MMU and D-cache off.
 * They are not ABI compliant and are only intended to be called from the kernel
 * entry points in head.S.
 */
@ Call this from the primary CPU
@
@ Records the primary CPU's boot mode in __boot_cpu_mode (kernel builds
@ only, see store_primary_cpu_mode above), then falls through into the
@ common installation path. Clobbers r4-r7.
ENTRY(__hyp_stub_install)
	store_primary_cpu_mode	r4, r5
ENDPROC(__hyp_stub_install)

	@ fall through...

@ Secondary CPUs should call here
ENTRY(__hyp_stub_install_secondary)
	mrs	r4, cpsr
	and	r4, r4, #MODE_MASK	@ r4 = this CPU's boot mode

	/*
	 * If the secondary has booted with a different mode, give up
	 * immediately.
	 */
	compare_cpu_mode_with_primary	r4, r5, r6
	retne	lr

	/*
	 * Once we have given up on one CPU, we do not try to install the
	 * stub hypervisor on the remaining ones: because the saved boot mode
	 * is modified, it can't compare equal to the CPSR mode field any
	 * more.
	 *
	 * Otherwise...
	 */

	cmp	r4, #HYP_MODE
	retne	lr			@ give up if the CPU is not in HYP mode

/*
 * Configure HSCTLR to set correct exception endianness/instruction set
 * state etc.
 * Turn off all traps
 * Eventually, CPU-specific code might be needed -- assume not for now
 *
 * This code relies on the "eret" instruction to synchronize the
 * various coprocessor accesses. This is done when we switch to SVC
 * (see safe_svcmode_maskall).
 */
	@ Now install the hypervisor stub:
	W(adr)	r7, __hyp_stub_vectors
	mcr	p15, 4, r7, c12, c0, 0	@ set hypervisor vector base (HVBAR)

	@ Disable all traps, so we don't get any nasty surprise
	mov	r7, #0
	mcr	p15, 4, r7, c1, c1, 0	@ HCR
	mcr	p15, 4, r7, c1, c1, 2	@ HCPTR
	mcr	p15, 4, r7, c1, c1, 3	@ HSTR

	@ r7 is still 0 here; only set the bits the build needs:
THUMB(	orr	r7, #(1 << 30)	)	@ HSCTLR.TE
ARM_BE8(orr	r7, r7, #(1 << 25))     @ HSCTLR.EE
	mcr	p15, 4, r7, c1, c0, 0	@ HSCTLR

	mrc	p15, 4, r7, c1, c1, 1	@ HDCR
	and	r7, #0x1f		@ Preserve HPMN
	mcr	p15, 4, r7, c1, c1, 1	@ HDCR

	@ Make sure NS-SVC is initialised appropriately
	mrc	p15, 0, r7, c1, c0, 0	@ SCTLR
	orr	r7, #(1 << 5)		@ CP15 barriers enabled
	bic	r7, #(3 << 7)		@ Clear SED/ITD for v8 (RES0 for v7)
	bic	r7, #(3 << 19)		@ WXN and UWXN disabled
	mcr	p15, 0, r7, c1, c0, 0	@ SCTLR

	@ Mirror the physical CPU/cluster IDs into the virtual copies so
	@ PL1 reads see the real values:
	mrc	p15, 0, r7, c0, c0, 0	@ MIDR
	mcr	p15, 4, r7, c0, c0, 0	@ VPIDR

	mrc	p15, 0, r7, c0, c0, 5	@ MPIDR
	mcr	p15, 4, r7, c0, c0, 5	@ VMPIDR

#if !defined(ZIMAGE) && defined(CONFIG_ARM_ARCH_TIMER)
	@ make CNTP_* and CNTPCT accessible from PL1
	mrc	p15, 0, r7, c0, c1, 1	@ ID_PFR1
	ubfx	r7, r7, #16, #4		@ GenTimer field: 0 => no arch timer
	teq	r7, #0
	beq	1f
	mrc	p15, 4, r7, c14, c1, 0	@ CNTHCTL
	orr	r7, r7, #3		@ PL1PCEN | PL1PCTEN
	mcr	p15, 4, r7, c14, c1, 0	@ CNTHCTL
	mov	r7, #0
	mcrr	p15, 4, r7, r7, c14	@ CNTVOFF (64-bit write, zeroed)

	@ Disable virtual timer in case it was counting
	mrc	p15, 0, r7, c14, c3, 1	@ CNTV_CTL
	bic	r7, #1			@ Clear ENABLE
	mcr	p15, 0, r7, c14, c3, 1	@ CNTV_CTL
1:
#endif

#ifdef CONFIG_ARM_GIC_V3
	@ Check whether GICv3 system registers are available
	mrc	p15, 0, r7, c0, c1, 1	@ ID_PFR1
	ubfx	r7, r7, #28, #4		@ GIC field: 0 => no sysreg interface
	teq	r7, #0
	beq	2f

	@ Enable system register accesses
	mrc	p15, 4, r7, c12, c9, 5	@ ICC_HSRE
	orr	r7, r7, #(ICC_SRE_EL2_ENABLE | ICC_SRE_EL2_SRE)
	mcr	p15, 4, r7, c12, c9, 5	@ ICC_HSRE
	isb

	@ SRE bit could be forced to 0 by firmware.
	@ Check whether it sticks before accessing any other sysreg
	mrc	p15, 4, r7, c12, c9, 5	@ ICC_HSRE
	tst	r7, #ICC_SRE_EL2_SRE
	beq	2f
	mov	r7, #0
	mcr	p15, 4, r7, c12, c11, 0	@ ICH_HCR
2:
#endif

	bx	lr			@ The boot CPU mode is left in r4.
ENDPROC(__hyp_stub_install_secondary)
189
@ HYP trap handler, reached through the __hyp_stub_trap vector slot.
@ The requested operation is passed in r0 (HVC_* constant), its
@ argument in r1. Returns a status in r0: 0 on success, HVC_STUB_ERR
@ for an unrecognised request.
__hyp_stub_do_trap:
#ifdef ZIMAGE
	teq	r0, #HVC_SET_VECTORS
	bne	1f
	/* Only the ZIMAGE stubs can change the HYP vectors */
	mcr	p15, 4, r1, c12, c0, 0	@ set HVBAR
	b	__hyp_stub_exit
#endif

1:	teq	r0, #HVC_SOFT_RESTART
	bne	2f
	bx	r1			@ jump to the new entry point, in HYP

2:	ldr	r0, =HVC_STUB_ERR	@ unknown request
	__ERET

__hyp_stub_exit:
	mov	r0, #0			@ success
	__ERET
ENDPROC(__hyp_stub_do_trap)
210
/*
 * __hyp_set_vectors is only used when ZIMAGE must bounce between HYP
 * and SVC. For the kernel itself, the vectors are set once and for
 * all by the stubs.
 */
@ In:  r0 = new HYP vector base address
@ Out: r0 = status from __hyp_stub_do_trap (0 or HVC_STUB_ERR)
ENTRY(__hyp_set_vectors)
	mov	r1, r0			@ argument for the stub
	mov	r0, #HVC_SET_VECTORS	@ request code
	__HVC(0)			@ trap into __hyp_stub_do_trap
	ret	lr
ENDPROC(__hyp_set_vectors)
222
@ Restart in HYP mode at the address given in r0, via the stub's
@ HVC_SOFT_RESTART handler (which does "bx r1" from HYP).
ENTRY(__hyp_soft_restart)
	mov	r1, r0			@ target entry point
	mov	r0, #HVC_SOFT_RESTART	@ request code
	__HVC(0)			@ trap into __hyp_stub_do_trap
	ret	lr
ENDPROC(__hyp_soft_restart)
229
@ HYP exception vector table, installed into HVBAR above.
@ .align 5 gives the 32-byte alignment the vector base register needs.
@ Only the HVC trap slot does anything; every other entry spins in place.
.align 5
ENTRY(__hyp_stub_vectors)
__hyp_stub_reset:	W(b)	.
__hyp_stub_und:		W(b)	.
__hyp_stub_svc:		W(b)	.
__hyp_stub_pabort:	W(b)	.
__hyp_stub_dabort:	W(b)	.
__hyp_stub_trap:	W(b)	__hyp_stub_do_trap
__hyp_stub_irq:		W(b)	.
__hyp_stub_fiq:		W(b)	.
ENDPROC(__hyp_stub_vectors)
241
242