xref: /linux/arch/arm64/kernel/cpu-reset.S (revision f9076ecfb1216a478312b1c078d04792df6d4477)
1*f9076ecfSGeoff Levand/*
2*f9076ecfSGeoff Levand * CPU reset routines
3*f9076ecfSGeoff Levand *
4*f9076ecfSGeoff Levand * Copyright (C) 2001 Deep Blue Solutions Ltd.
5*f9076ecfSGeoff Levand * Copyright (C) 2012 ARM Ltd.
6*f9076ecfSGeoff Levand * Copyright (C) 2015 Huawei Futurewei Technologies.
7*f9076ecfSGeoff Levand *
8*f9076ecfSGeoff Levand * This program is free software; you can redistribute it and/or modify
9*f9076ecfSGeoff Levand * it under the terms of the GNU General Public License version 2 as
10*f9076ecfSGeoff Levand * published by the Free Software Foundation.
11*f9076ecfSGeoff Levand */
12*f9076ecfSGeoff Levand
13*f9076ecfSGeoff Levand#include <linux/linkage.h>
14*f9076ecfSGeoff Levand#include <asm/assembler.h>
15*f9076ecfSGeoff Levand#include <asm/sysreg.h>
16*f9076ecfSGeoff Levand#include <asm/virt.h>
17*f9076ecfSGeoff Levand
18*f9076ecfSGeoff Levand.text
19*f9076ecfSGeoff Levand.pushsection    .idmap.text, "ax"
20*f9076ecfSGeoff Levand
21*f9076ecfSGeoff Levand/*
22*f9076ecfSGeoff Levand * __cpu_soft_restart(el2_switch, entry, arg0, arg1, arg2) - Helper for
23*f9076ecfSGeoff Levand * cpu_soft_restart.
24*f9076ecfSGeoff Levand *
25*f9076ecfSGeoff Levand * @el2_switch: Flag to indicate a switch to EL2 is needed.
26*f9076ecfSGeoff Levand * @entry: Location to jump to for soft reset.
27*f9076ecfSGeoff Levand * arg0: First argument passed to @entry.
28*f9076ecfSGeoff Levand * arg1: Second argument passed to @entry.
29*f9076ecfSGeoff Levand * arg2: Third argument passed to @entry.
30*f9076ecfSGeoff Levand *
31*f9076ecfSGeoff Levand * Put the CPU into the same state as it would be if it had been reset, and
32*f9076ecfSGeoff Levand * branch to what would be the reset vector. It must be executed with the
33*f9076ecfSGeoff Levand * flat identity mapping.
34*f9076ecfSGeoff Levand */
35*f9076ecfSGeoff LevandENTRY(__cpu_soft_restart)
36*f9076ecfSGeoff Levand	/* Clear sctlr_el1 flags (SCTLR_ELx_FLAGS) so the CPU runs reset-like on the flat identity map. */
37*f9076ecfSGeoff Levand	mrs	x12, sctlr_el1
38*f9076ecfSGeoff Levand	ldr	x13, =SCTLR_ELx_FLAGS		// literal-pool load of the bits to clear
39*f9076ecfSGeoff Levand	bic	x12, x12, x13
40*f9076ecfSGeoff Levand	msr	sctlr_el1, x12
41*f9076ecfSGeoff Levand	isb					// make the sctlr_el1 update visible before continuing
42*f9076ecfSGeoff Levand
43*f9076ecfSGeoff Levand	cbz	x0, 1f				// el2_switch? skip the HVC when zero
44*f9076ecfSGeoff Levand	mov	x0, #HVC_SOFT_RESTART		// call ID for the EL2 vector; x1-x4 still hold entry/args
45*f9076ecfSGeoff Levand	hvc	#0				// no return
46*f9076ecfSGeoff Levand
47*f9076ecfSGeoff Levand1:	mov	x18, x1				// entry (stashed so x0-x2 are free for the args)
48*f9076ecfSGeoff Levand	mov	x0, x2				// arg0
49*f9076ecfSGeoff Levand	mov	x1, x3				// arg1
50*f9076ecfSGeoff Levand	mov	x2, x4				// arg2
51*f9076ecfSGeoff Levand	br	x18				// jump to @entry; does not return
52*f9076ecfSGeoff LevandENDPROC(__cpu_soft_restart)
53*f9076ecfSGeoff Levand
54*f9076ecfSGeoff Levand.popsection
55