/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/mm/proc-v7m.S
 *
 *  Copyright (C) 2008 ARM Ltd.
 *  Copyright (C) 2001 Deep Blue Solutions Ltd.
 *
 *  This is the "shell" of the ARMv7-M processor support.
 */
#include <linux/linkage.h>
#include <linux/cfi_types.h>
#include <asm/assembler.h>
#include <asm/page.h>
#include <asm/v7m.h>
#include "proc-macros.S"

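/*
 * Generic ARMv7-M needs no per-CPU init or teardown, so these are no-ops.
 * Cortex-M7 overrides proc_fin with cpu_cm7_proc_fin below to turn its
 * caches off.
 */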
SYM_TYPED_FUNC_START(cpu_v7m_proc_init)
	ret	lr
SYM_FUNC_END(cpu_v7m_proc_init)

SYM_TYPED_FUNC_START(cpu_v7m_proc_fin)
	ret	lr
SYM_FUNC_END(cpu_v7m_proc_fin)

/*
 *	cpu_v7m_reset(loc)
 *
 *	Perform a soft reset of the system.  Put the CPU into the
 *	same state as it would be if it had been reset, and branch
 *	to what would be the reset vector.
 *
 *	- loc   - location to jump to for soft reset
 */
	.align	5
SYM_TYPED_FUNC_START(cpu_v7m_reset)
	ret	r0
SYM_FUNC_END(cpu_v7m_reset)

/*
 *	cpu_v7m_do_idle()
 *
 *	Idle the processor (eg, wait for interrupt).
 *
 *	IRQs are already disabled.
 */
SYM_TYPED_FUNC_START(cpu_v7m_do_idle)
	wfi
	ret	lr
SYM_FUNC_END(cpu_v7m_do_idle)

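/*
 * The generic ARMv7-M processor functions assume no caches (nop_cache_fns
 * below), so there is nothing to clean here; Cortex-M7 parts use
 * cpu_cm7_dcache_clean_area instead.
 */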
SYM_TYPED_FUNC_START(cpu_v7m_dcache_clean_area)
	ret	lr
SYM_FUNC_END(cpu_v7m_dcache_clean_area)

/*
 * There is no MMU, so there is nothing to do here.
 */
SYM_TYPED_FUNC_START(cpu_v7m_switch_mm)
	ret	lr
SYM_FUNC_END(cpu_v7m_switch_mm)

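/*
 * There is no extra CPU state to save across suspend here: the suspend
 * buffer size is zero and the hooks below are no-ops.
 */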
.globl	cpu_v7m_suspend_size
.equ	cpu_v7m_suspend_size, 0

#ifdef CONFIG_ARM_CPU_SUSPEND
SYM_TYPED_FUNC_START(cpu_v7m_do_suspend)
	ret	lr
SYM_FUNC_END(cpu_v7m_do_suspend)

SYM_TYPED_FUNC_START(cpu_v7m_do_resume)
	ret	lr
SYM_FUNC_END(cpu_v7m_do_resume)
#endif

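/*
 *	cpu_cm7_dcache_clean_area(addr, size)
 *
 *	Cortex-M7 implements caches: clean the D-cache lines covering
 *	[addr, addr + size) by writing each line address to the SCB
 *	DCCMVAC register (clean D-cache line by address to PoC), then
 *	drain with a dsb.
 */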
SYM_TYPED_FUNC_START(cpu_cm7_dcache_clean_area)
	dcache_line_size r2, r3
	movw	r3, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_DCCMVAC
	movt	r3, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_DCCMVAC

1:	str	r0, [r3]		@ clean D entry
	add	r0, r0, r2
	subs	r1, r1, r2
	bhi	1b
	dsb
	ret	lr
SYM_FUNC_END(cpu_cm7_dcache_clean_area)

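/*
 *	cpu_cm7_proc_fin()
 *
 *	Disable the Cortex-M7 data and instruction caches by clearing the
 *	DC and IC enable bits in the SCB CCR.
 */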
SYM_TYPED_FUNC_START(cpu_cm7_proc_fin)
	movw	r2, #:lower16:(BASEADDR_V7M_SCB + V7M_SCB_CCR)
	movt	r2, #:upper16:(BASEADDR_V7M_SCB + V7M_SCB_CCR)
	ldr	r0, [r2]
	bic	r0, r0, #(V7M_SCB_CCR_DC | V7M_SCB_CCR_IC)
	str	r0, [r2]
	ret	lr
SYM_FUNC_END(cpu_cm7_proc_fin)

	.section ".init.text", "ax"

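/*
 *	__v7m_cm7_setup
 *
 *	Cortex-M7 specific setup: preload r8 with the CCR cache and branch
 *	prediction enable bits, then join the common ARMv7-M path.
 */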
__v7m_cm7_setup:
	mov	r8, #(V7M_SCB_CCR_DC | V7M_SCB_CCR_IC | V7M_SCB_CCR_BP)
	b	__v7m_setup_cont
/*
 *	__v7m_setup
 *
 *	This should be able to cover all ARMv7-M cores.
 */
__v7m_setup:
	mov	r8, 0

__v7m_setup_cont:
	@ Configure the vector table base address
	ldr	r0, =BASEADDR_V7M_SCB
	ldr	r12, =vector_table
	str	r12, [r0, V7M_SCB_VTOR]

	@ Enable UsageFault, BusFault and MemManage fault.
	ldr	r5, [r0, #V7M_SCB_SHCSR]
	orr	r5, #(V7M_SCB_SHCSR_USGFAULTENA | V7M_SCB_SHCSR_BUSFAULTENA | V7M_SCB_SHCSR_MEMFAULTENA)
	str	r5, [r0, #V7M_SCB_SHCSR]

	@ Lower the priority of the SVC and PendSV exceptions
	mov	r5, #0x80000000
	str	r5, [r0, V7M_SCB_SHPR2]	@ set SVC priority
	mov	r5, #0x00800000
	str	r5, [r0, V7M_SCB_SHPR3]	@ set PendSV priority
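	@ (SHPR2 bits [31:24] hold the SVCall priority and SHPR3 bits
	@ [23:16] the PendSV priority, so both end up at 0x80, i.e. lower
	@ priority than the other exceptions, which stay at 0.)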

	@ SVC to switch to handler mode. Notice that this requires sp to
	@ point to writeable memory because the processor saves
	@ some registers to the stack.
	badr	r1, 1f
	ldr	r5, [r12, #11 * 4]	@ read the SVC vector entry
	str	r1, [r12, #11 * 4]	@ write the temporary SVC vector entry
	dsb
	mov	r6, lr			@ save LR
	ldr	sp, =init_thread_union + THREAD_START_SP
	cpsie	i
	svc	#0
1:	cpsid	i
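	@ The svc above entered Handler mode through the temporary vector:
	@ the core stacked r0-r3, r12, lr, pc and xPSR and left an
	@ EXC_RETURN value in lr.  No exception return is performed, so the
	@ CPU stays in Handler mode; r0-r3 and r12 (r0 = SCB base,
	@ r12 = vector table base) are read back from the stacked frame and
	@ the original SVC vector is put back in place.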
	/* Calculate exc_ret */
	orr	r10, lr, #EXC_RET_THREADMODE_PROCESSSTACK
	ldmia	sp, {r0-r3, r12}
	str	r5, [r12, #11 * 4]	@ restore the original SVC vector entry
	mov	lr, r6			@ restore LR

	@ Special-purpose control register
	mov	r1, #1
	msr	control, r1		@ Thread mode has unprivileged access

	@ Configure caches (if implemented)
	teq	r8, #0
	stmiane	sp, {r0-r6, lr}		@ v7m_invalidate_l1 touches r0-r6
	blne	v7m_invalidate_l1
	teq	r8, #0			@ re-evaluate condition
	ldmiane	sp, {r0-r6, lr}
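	@ (r8 is non-zero only on the Cortex-M7 path, where __v7m_cm7_setup
	@ preloaded it with the CCR cache and branch prediction enable bits.)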

	@ Configure the System Control Register to ensure 8-byte stack alignment
	@ Note the STKALIGN bit is either RW or RAO.
	ldr	r0, [r0, V7M_SCB_CCR]   @ system control register
	orr	r0, #V7M_SCB_CCR_STKALIGN
	orr	r0, r0, r8
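	@ Return the updated CCR value in r0 (and the exc_ret value in r10)
	@ for the caller to install.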

	ret	lr
ENDPROC(__v7m_setup)

/*
 * Cortex-M7 processor functions
 */
	globl_equ	cpu_cm7_proc_init,	cpu_v7m_proc_init
	globl_equ	cpu_cm7_reset,		cpu_v7m_reset
	globl_equ	cpu_cm7_do_idle,	cpu_v7m_do_idle
	globl_equ	cpu_cm7_switch_mm,	cpu_v7m_switch_mm

	define_processor_functions v7m, dabort=nommu_early_abort, pabort=legacy_pabort, nommu=1
	define_processor_functions cm7, dabort=nommu_early_abort, pabort=legacy_pabort, nommu=1

	.section ".rodata"
	string cpu_arch_name, "armv7m"
	string cpu_elf_name, "v7m"
	string cpu_v7m_name, "ARMv7-M"

	.section ".proc.info.init", "a"

.macro __v7m_proc name, initfunc, cache_fns = nop_cache_fns, hwcaps = 0, proc_fns = v7m_processor_functions
	.long	0			/* proc_info_list.__cpu_mm_mmu_flags */
	.long	0			/* proc_info_list.__cpu_io_mmu_flags */
	initfn	\initfunc, \name
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_HALF | HWCAP_THUMB | HWCAP_FAST_MULT | \hwcaps
	.long	cpu_v7m_name
	.long	\proc_fns
	.long	0			/* proc_info_list.tlb */
	.long	0			/* proc_info_list.user */
	.long	\cache_fns
.endm
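
/*
 * Each record below must match the layout of struct proc_info_list in
 * <asm/procinfo.h>; the MMU flag, TLB and user (page table) fields are
 * zero because ARMv7-M has no MMU.  __lookup_processor_type matches the
 * records against the MIDR in order, so the specific Cortex-M entries
 * come before the generic ARMv7-M catch-all at the end.
 */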

	/*
	 * Match ARM Cortex-M55 processor.
	 */
	.type	__v7m_cm55_proc_info, #object
__v7m_cm55_proc_info:
	.long	0x410fd220		/* ARM Cortex-M55 0xD22 */
	.long	0xff0ffff0		/* Mask off revision, patch release */
	__v7m_proc __v7m_cm55_proc_info, __v7m_cm7_setup, hwcaps = HWCAP_EDSP, cache_fns = v7m_cache_fns, proc_fns = cm7_processor_functions
	.size	__v7m_cm55_proc_info, . - __v7m_cm55_proc_info

	/*
	 * Match ARM Cortex-M33 processor.
	 */
	.type	__v7m_cm33_proc_info, #object
__v7m_cm33_proc_info:
	.long	0x410fd210		/* ARM Cortex-M33 0xD21 */
	.long	0xff0ffff0		/* Mask off revision, patch release */
	__v7m_proc __v7m_cm33_proc_info, __v7m_setup, hwcaps = HWCAP_EDSP
	.size	__v7m_cm33_proc_info, . - __v7m_cm33_proc_info

	/*
	 * Match ARM Cortex-M7 processor.
	 */
	.type	__v7m_cm7_proc_info, #object
__v7m_cm7_proc_info:
	.long	0x410fc270		/* ARM Cortex-M7 0xC27 */
	.long	0xff0ffff0		/* Mask off revision, patch release */
	__v7m_proc __v7m_cm7_proc_info, __v7m_cm7_setup, hwcaps = HWCAP_EDSP, cache_fns = v7m_cache_fns, proc_fns = cm7_processor_functions
	.size	__v7m_cm7_proc_info, . - __v7m_cm7_proc_info

	/*
	 * Match ARM Cortex-M4 processor.
	 */
	.type	__v7m_cm4_proc_info, #object
__v7m_cm4_proc_info:
	.long	0x410fc240		/* ARM Cortex-M4 0xC24 */
	.long	0xff0ffff0		/* Mask off revision, patch release */
	__v7m_proc __v7m_cm4_proc_info, __v7m_setup, hwcaps = HWCAP_EDSP
	.size	__v7m_cm4_proc_info, . - __v7m_cm4_proc_info

	/*
	 * Match ARM Cortex-M3 processor.
	 */
	.type	__v7m_cm3_proc_info, #object
__v7m_cm3_proc_info:
	.long	0x410fc230		/* ARM Cortex-M3 0xC23 */
	.long	0xff0ffff0		/* Mask off revision, patch release */
	__v7m_proc __v7m_cm3_proc_info, __v7m_setup
	.size	__v7m_cm3_proc_info, . - __v7m_cm3_proc_info

	/*
	 * Match any ARMv7-M processor core.
	 */
	.type	__v7m_proc_info, #object
__v7m_proc_info:
	.long	0x000f0000		@ Required ID value
	.long	0x000f0000		@ Mask for ID
	__v7m_proc __v7m_proc_info, __v7m_setup
	.size	__v7m_proc_info, . - __v7m_proc_info