xref: /linux/arch/arm64/kernel/cpu-reset.S (revision 500d14affdf73677071c075bb9becc637b60fe39)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * CPU reset routines
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 * Copyright (C) 2015 Huawei Futurewei Technologies.
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

.text
.pushsection    .idmap.text, "awx"

/*
 * __cpu_soft_restart(el2_switch, entry, arg0, arg1, arg2) - Helper for
 * cpu_soft_restart.
 *
 * @el2_switch: Flag to indicate a switch to EL2 is needed.
 * @entry: Location to jump to for soft reset.
 * @arg0: First argument passed to @entry. (relocation list)
 * @arg1: Second argument passed to @entry. (physical kernel entry)
 * @arg2: Third argument passed to @entry. (physical dtb address)
 *
 * Put the CPU into the same state as it would be if it had been reset, and
 * branch to what would be the reset vector. It must be executed with the
 * flat identity mapping.
 */
ENTRY(__cpu_soft_restart)
	/* Clear sctlr_el1 flags: turn off the MMU, caches and alignment checks. */
	mrs	x12, sctlr_el1
	ldr	x13, =SCTLR_ELx_FLAGS
	bic	x12, x12, x13
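	/*
	 * Some cores (e.g. the Qualcomm Falkor erratum workaround selected by
	 * this macro) need an extra barrier immediately before the MMU is
	 * turned off by the sctlr_el1 write below.
	 */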
	pre_disable_mmu_workaround
	msr	sctlr_el1, x12
	isb

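	/*
	 * If an EL2 switch was requested, hand over to the EL2 stub vector
	 * (hyp-stub.S): x0 carries the HVC_SOFT_RESTART function ID and the
	 * stub re-performs the soft restart from EL2 using the entry point
	 * and arguments still held in x1-x4, so the hvc never returns here.
	 */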
	cbz	x0, 1f				// el2_switch?
	mov	x0, #HVC_SOFT_RESTART
	hvc	#0				// no return

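	/*
	 * No EL2 switch: stay at the current exception level, move the
	 * arguments into the AAPCS argument registers and branch to the
	 * entry point (typically the kexec relocation code).
	 */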
1:	mov	x8, x1				// entry
	mov	x0, x2				// arg0
	mov	x1, x3				// arg1
	mov	x2, x4				// arg2
	br	x8
ENDPROC(__cpu_soft_restart)
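
/*
 * For reference, a minimal sketch of the C-side wrapper expected to invoke
 * this helper (modelled on cpu-reset.h; not verbatim kernel source). It
 * makes the call through the flat identity map, which is why this routine
 * is placed in .idmap.text:
 *
 *	static inline void __noreturn cpu_soft_restart(unsigned long entry,
 *						       unsigned long arg0,
 *						       unsigned long arg1,
 *						       unsigned long arg2)
 *	{
 *		typeof(__cpu_soft_restart) *restart;
 *
 *		// Ask for an EL2 switch only when EL2 exists and the kernel
 *		// is not already running there (i.e. not VHE).
 *		unsigned long el2_switch = !is_kernel_in_hyp_mode() &&
 *					   is_hyp_mode_available();
 *
 *		// Call through the physical alias so the branch target stays
 *		// valid once the MMU has been turned off.
 *		restart = (void *)__pa_symbol(__cpu_soft_restart);
 *
 *		cpu_install_idmap();
 *		restart(el2_switch, entry, arg0, arg1, arg2);
 *		unreachable();
 *	}
 */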

.popsection