@ xref: /linux/arch/arm/mm/proc-v7.S (revision b68fc09be48edbc47de1a0f3d42ef8adf6c0ac55)
/*
 *  linux/arch/arm/mm/proc-v7.S
 *
 *  Copyright (C) 2001 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This is the "shell" of the ARMv7 processor support.
 */
#include <linux/arm-smccc.h>
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/memory.h>

#include "proc-macros.S"

#ifdef CONFIG_ARM_LPAE
#include "proc-v7-3level.S"
#else
#include "proc-v7-2level.S"
#endif

/*
 *	cpu_v7_proc_init()
 *
 *	Processor-specific initialisation: ARMv7 needs none, so this
 *	simply returns to the caller.
 */
ENTRY(cpu_v7_proc_init)
	ret	lr
ENDPROC(cpu_v7_proc_init)

/*
 *	cpu_v7_proc_fin()
 *
 *	Prepare the processor for reset/power-off: clear the I-cache (I),
 *	D-cache (C) and alignment (A) enable bits in the SCTLR.
 */
ENTRY(cpu_v7_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000			@ ...i............
	bic	r0, r0, #0x0006			@ .............ca.
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr
ENDPROC(cpu_v7_proc_fin)

/*
 *	cpu_v7_reset(loc, hyp)
 *
 *	Perform a soft reset of the system.  Put the CPU into the
 *	same state as it would be if it had been reset, and branch
 *	to what would be the reset vector.
 *
 *	- loc   - location to jump to for soft reset
 *	- hyp   - indicate if restart occurs in HYP mode
 *
 *	This code must be executed using a flat identity mapping with
 *	caches disabled.
 */
	.align	5
	.pushsection	.idmap.text, "ax"
ENTRY(cpu_v7_reset)
	mrc	p15, 0, r2, c1, c0, 0		@ ctrl register
	bic	r2, r2, #0x1			@ ...............m
 THUMB(	bic	r2, r2, #1 << 30 )		@ SCTLR.TE (Thumb exceptions)
	mcr	p15, 0, r2, c1, c0, 0		@ disable MMU
	isb
#ifdef CONFIG_ARM_VIRT_EXT
	teq	r1, #0				@ restart in HYP mode?
	bne	__hyp_soft_restart		@ yes, let HYP code handle it
#endif
	bx	r0
ENDPROC(cpu_v7_reset)
	.popsection

/*
 *	cpu_v7_do_idle()
 *
 *	Idle the processor (eg, wait for interrupt).
 *
 *	IRQs are already disabled.
 */
ENTRY(cpu_v7_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi
	ret	lr
ENDPROC(cpu_v7_do_idle)

/*
 *	cpu_v7_dcache_clean_area(addr, size)
 *
 *	Clean (write back) the D-cache over [r0, r0 + r1), one cache
 *	line at a time.  On SMP this is a no-op, since the MP extensions
 *	imply hardware-coherent L1 page table walks.
 */
ENTRY(cpu_v7_dcache_clean_area)
	ALT_SMP(W(nop))			@ MP extensions imply L1 PTW
	ALT_UP_B(1f)
	ret	lr
1:	dcache_line_size r2, r3
2:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, r2
	subs	r1, r1, r2
	bhi	2b
	dsb	ishst
	ret	lr
ENDPROC(cpu_v7_dcache_clean_area)

#ifdef CONFIG_ARM_PSCI
	.arch_extension sec
/*
 *	cpu_v7_smc_switch_mm()
 *
 *	Spectre-v2 hardened context switch: invoke the firmware's
 *	ARM_SMCCC_ARCH_WORKAROUND_1 branch-predictor invalidation via
 *	SMC, then perform the normal switch_mm.  r0-r3 are preserved
 *	around the firmware call.
 */
ENTRY(cpu_v7_smc_switch_mm)
	stmfd	sp!, {r0 - r3}
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	smc	#0
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_smc_switch_mm)
	.arch_extension virt
/*
 *	cpu_v7_hvc_switch_mm()
 *
 *	As cpu_v7_smc_switch_mm, but issue the workaround call to the
 *	hypervisor (HVC) instead of secure firmware.
 */
ENTRY(cpu_v7_hvc_switch_mm)
	stmfd	sp!, {r0 - r3}
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	hvc	#0
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_hvc_switch_mm)	@ fixed: was ENDPROC(cpu_v7_smc_switch_mm), must match ENTRY
#endif
/*
 *	cpu_v7_iciallu_switch_mm()
 *
 *	Branch-predictor hardening variant of switch_mm (used e.g. for
 *	Cortex-A15 under CONFIG_HARDEN_BRANCH_PREDICTOR): invalidate the
 *	entire I-cache before the normal context switch.
 */
ENTRY(cpu_v7_iciallu_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 0		@ ICIALLU
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_iciallu_switch_mm)
/*
 *	cpu_v7_bpiall_switch_mm()
 *
 *	Branch-predictor hardening variant of switch_mm: flush the
 *	entire branch predictor array (BPIALL) before the normal
 *	context switch.
 */
ENTRY(cpu_v7_bpiall_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 6		@ flush BTAC/BTB
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_bpiall_switch_mm)

128	string	cpu_v7_name, "ARMv7 Processor"
129	.align
130
131/* Suspend/resume support: derived from arch/arm/mach-s5pv210/sleep.S */
132.globl	cpu_v7_suspend_size
133.equ	cpu_v7_suspend_size, 4 * 9
#ifdef CONFIG_ARM_CPU_SUSPEND
/*
 *	cpu_v7_do_suspend(r0 = suspend buffer)
 *
 *	Save the CP15 state that must survive power-down into the buffer
 *	at r0 (cpu_v7_suspend_size bytes): FCSE/PID, user r/o thread ID,
 *	domain ID, TTB1, TTBCR, SCTLR, ACTLR and CPACR.
 */
ENTRY(cpu_v7_do_suspend)
	stmfd	sp!, {r4 - r11, lr}
	mrc	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mrc	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	stmia	r0!, {r4 - r5}
#ifdef CONFIG_MMU
	mrc	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mrrc	p15, 1, r5, r7, c2	@ TTB 1
#else
	mrc	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mrc	p15, 0, r11, c2, c0, 2	@ TTB control register
#endif
	mrc	p15, 0, r8, c1, c0, 0	@ Control register
	mrc	p15, 0, r9, c1, c0, 1	@ Auxiliary control register
	mrc	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	stmia	r0, {r5 - r11}
	ldmfd	sp!, {r4 - r11, pc}
ENDPROC(cpu_v7_do_suspend)

/*
 *	cpu_v7_do_resume(r0 = suspend buffer, r1 = TTB 0)
 *
 *	Restore the CP15 state saved by cpu_v7_do_suspend, re-seat the
 *	translation table registers and memory remap registers, then
 *	hand the saved SCTLR value (in r0) to cpu_resume_mmu to turn
 *	the MMU back on.
 */
ENTRY(cpu_v7_do_resume)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0	@ invalidate I cache
	mcr	p15, 0, ip, c13, c0, 1	@ set reserved context ID
	ldmia	r0!, {r4 - r5}
	mcr	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mcr	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	ldmia	r0, {r5 - r11}
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0	@ invalidate TLBs
	mcr	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mcrr	p15, 0, r1, ip, c2	@ TTB 0
	mcrr	p15, 1, r5, r7, c2	@ TTB 1
#else
	ALT_SMP(orr	r1, r1, #TTB_FLAGS_SMP)
	ALT_UP(orr	r1, r1, #TTB_FLAGS_UP)
	mcr	p15, 0, r1, c2, c0, 0	@ TTB 0
	mcr	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mcr	p15, 0, r11, c2, c0, 2	@ TTB control register
	ldr	r4, =PRRR		@ PRRR
	ldr	r5, =NMRR		@ NMRR
	mcr	p15, 0, r4, c10, c2, 0	@ write PRRR
	mcr	p15, 0, r5, c10, c2, 1	@ write NMRR
#endif	/* CONFIG_MMU */
	mrc	p15, 0, r4, c1, c0, 1	@ Read Auxiliary control register
	teq	r4, r9			@ Is it already set?
	mcrne	p15, 0, r9, c1, c0, 1	@ No, so write it
	mcr	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	isb
	dsb
	mov	r0, r8			@ control register
	b	cpu_resume_mmu
ENDPROC(cpu_v7_do_resume)
#endif

/* Cortex-A9 MP additionally saves the diagnostic and power registers */
.globl	cpu_ca9mp_suspend_size
.equ	cpu_ca9mp_suspend_size, cpu_v7_suspend_size + 4 * 2
#ifdef CONFIG_ARM_CPU_SUSPEND
/*
 *	cpu_ca9mp_do_suspend(r0 = suspend buffer)
 *
 *	Save the Cortex-A9 diagnostic and power control registers,
 *	then fall through to the generic v7 suspend path.
 */
ENTRY(cpu_ca9mp_do_suspend)
	stmfd	sp!, {r4 - r5}
	mrc	p15, 0, r4, c15, c0, 1		@ Diagnostic register
	mrc	p15, 0, r5, c15, c0, 0		@ Power register
	stmia	r0!, {r4 - r5}
	ldmfd	sp!, {r4 - r5}
	b	cpu_v7_do_suspend
ENDPROC(cpu_ca9mp_do_suspend)

/*
 *	cpu_ca9mp_do_resume(r0 = suspend buffer)
 *
 *	Restore the Cortex-A9 diagnostic and power control registers,
 *	writing each only when the saved value differs from the current
 *	one, then continue with the generic v7 resume path.
 */
ENTRY(cpu_ca9mp_do_resume)
	ldmia	r0!, {r4 - r5}
	mrc	p15, 0, r10, c15, c0, 1		@ Read Diagnostic register
	teq	r4, r10				@ Already restored?
	mcrne	p15, 0, r4, c15, c0, 1		@ No, so restore it
	mrc	p15, 0, r10, c15, c0, 0		@ Read Power register
	teq	r5, r10				@ Already restored?
	mcrne	p15, 0, r5, c15, c0, 0		@ No, so restore it
	b	cpu_v7_do_resume
ENDPROC(cpu_ca9mp_do_resume)
#endif

#ifdef CONFIG_CPU_PJ4B
	/* Marvell PJ4B: reuse the generic v7 ops except where noted */
	globl_equ	cpu_pj4b_switch_mm,     cpu_v7_switch_mm
	globl_equ	cpu_pj4b_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_pj4b_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_pj4b_proc_fin, 	cpu_v7_proc_fin
	globl_equ	cpu_pj4b_reset,	   	cpu_v7_reset
#ifdef CONFIG_PJ4B_ERRATA_4742
/*
 *	cpu_pj4b_do_idle()
 *
 *	PJ4B errata 4742 workaround: an extra barrier after WFI before
 *	execution continues.
 */
ENTRY(cpu_pj4b_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi
	dsb					@barrier
	ret	lr
ENDPROC(cpu_pj4b_do_idle)
#else
	globl_equ	cpu_pj4b_do_idle,  	cpu_v7_do_idle
#endif
	globl_equ	cpu_pj4b_dcache_clean_area,	cpu_v7_dcache_clean_area
#ifdef CONFIG_ARM_CPU_SUSPEND
/*
 *	cpu_pj4b_do_suspend(r0 = suspend buffer)
 *
 *	Save the PJ4B-specific CP15 registers (extra features, aux
 *	func/debug modes, PMC), then fall through to the generic v7
 *	suspend path.
 */
ENTRY(cpu_pj4b_do_suspend)
	stmfd	sp!, {r6 - r10}
	mrc	p15, 1, r6, c15, c1, 0  @ save CP15 - extra features
	mrc	p15, 1, r7, c15, c2, 0	@ save CP15 - Aux Func Modes Ctrl 0
	mrc	p15, 1, r8, c15, c1, 2	@ save CP15 - Aux Debug Modes Ctrl 2
	mrc	p15, 1, r9, c15, c1, 1  @ save CP15 - Aux Debug Modes Ctrl 1
	mrc	p15, 0, r10, c9, c14, 0  @ save CP15 - PMC
	stmia	r0!, {r6 - r10}
	ldmfd	sp!, {r6 - r10}
	b cpu_v7_do_suspend
ENDPROC(cpu_pj4b_do_suspend)

/*
 *	cpu_pj4b_do_resume(r0 = suspend buffer)
 *
 *	Restore the PJ4B-specific CP15 registers saved above, then
 *	continue with the generic v7 resume path.
 */
ENTRY(cpu_pj4b_do_resume)
	ldmia	r0!, {r6 - r10}
	mcr	p15, 1, r6, c15, c1, 0  @ restore CP15 - extra features
	mcr	p15, 1, r7, c15, c2, 0	@ restore CP15 - Aux Func Modes Ctrl 0
	mcr	p15, 1, r8, c15, c1, 2	@ restore CP15 - Aux Debug Modes Ctrl 2
	mcr	p15, 1, r9, c15, c1, 1  @ restore CP15 - Aux Debug Modes Ctrl 1
	mcr	p15, 0, r10, c9, c14, 0  @ restore CP15 - PMC
	b cpu_v7_do_resume
ENDPROC(cpu_pj4b_do_resume)
#endif
.globl	cpu_pj4b_suspend_size
.equ	cpu_pj4b_suspend_size, cpu_v7_suspend_size + 4 * 5

#endif

/*
 *	__v7_setup
 *
 *	Initialise TLB, Caches, and MMU state ready to switch the MMU
 *	on.  Return in r0 the new CP15 C1 control register setting.
 *
 *	r1, r2, r4, r5, r9, r13 must be preserved - r13 is not a stack
 *	r4: TTBR0 (low word)
 *	r5: TTBR0 (high word if LPAE)
 *	r8: TTBR1
 *	r9: Main ID register
 *
 *	This should be able to cover all ARMv7 cores.
 *
 *	It is assumed that:
 *	- cache type register is implemented
 */
__v7_ca5mp_setup:
__v7_ca9mp_setup:
__v7_cr7mp_setup:
__v7_cr8mp_setup:
	mov	r10, #(1 << 0)			@ Cache/TLB ops broadcasting
	b	1f
__v7_ca7mp_setup:
__v7_ca12mp_setup:
__v7_ca15mp_setup:
__v7_b15mp_setup:
__v7_ca17mp_setup:
	mov	r10, #0
1:	adr	r0, __v7_setup_stack_ptr
	ldr	r12, [r0]
	add	r12, r12, r0			@ the local stack
	stmia	r12, {r1-r6, lr}		@ v7_invalidate_l1 touches r0-r6
	bl      v7_invalidate_l1
	ldmia	r12, {r1-r6, lr}
#ifdef CONFIG_SMP
	orr	r10, r10, #(1 << 6)		@ Enable SMP/nAMP mode
	ALT_SMP(mrc	p15, 0, r0, c1, c0, 1)
	ALT_UP(mov	r0, r10)		@ fake it for UP
	orr	r10, r10, r0			@ Set required bits
	teq	r10, r0				@ Were they already set?
	mcrne	p15, 0, r10, c1, c0, 1		@ No, update register
#endif
	b	__v7_setup_cont

/*
 * Errata:
 *  r0, r10 available for use
 *  r1, r2, r4, r5, r9, r13: must be preserved
 *  r3: contains MIDR rX number in bits 23-20
 *  r6: contains MIDR rXpY as 8-bit XY number
 *  r9: MIDR
 */
__ca8_errata:
#if defined(CONFIG_ARM_ERRATA_430973) && !defined(CONFIG_ARCH_MULTIPLATFORM)
	teq	r3, #0x00100000			@ only present in r1p*
	mrceq	p15, 0, r0, c1, c0, 1		@ read aux control register
	orreq	r0, r0, #(1 << 6)		@ set IBE to 1
	mcreq	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_458693
	teq	r6, #0x20			@ only present in r2p0
	mrceq	p15, 0, r0, c1, c0, 1		@ read aux control register
	orreq	r0, r0, #(1 << 5)		@ set L1NEON to 1
	orreq	r0, r0, #(1 << 9)		@ set PLDNOP to 1
	mcreq	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_460075
	teq	r6, #0x20			@ only present in r2p0
	mrceq	p15, 1, r0, c9, c0, 2		@ read L2 cache aux ctrl register
	tsteq	r0, #1 << 22
	orreq	r0, r0, #(1 << 22)		@ set the Write Allocate disable bit
	mcreq	p15, 1, r0, c9, c0, 2		@ write the L2 cache aux ctrl register
#endif
	b	__errata_finish

/* Cortex-A9 errata workarounds (selected by revision via r3/r6) */
__ca9_errata:
#ifdef CONFIG_ARM_ERRATA_742230
	cmp	r6, #0x22			@ only present up to r2p2
	mrcle	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrle	r0, r0, #1 << 4			@ set bit #4
	mcrle	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_742231
	teq	r6, #0x20			@ present in r2p0
	teqne	r6, #0x21			@ present in r2p1
	teqne	r6, #0x22			@ present in r2p2
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 12		@ set bit #12
	orreq	r0, r0, #1 << 22		@ set bit #22
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_743622
	teq	r3, #0x00200000			@ only present in r2p*
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 6			@ set bit #6
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#if defined(CONFIG_ARM_ERRATA_751472) && defined(CONFIG_SMP)
	ALT_SMP(cmp r6, #0x30)			@ present prior to r3p0
	ALT_UP_B(1f)
	mrclt	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrlt	r0, r0, #1 << 11		@ set bit #11
	mcrlt	p15, 0, r0, c15, c0, 1		@ write diagnostic register
1:
#endif
	b	__errata_finish

/* Cortex-A15 errata workarounds */
__ca15_errata:
#ifdef CONFIG_ARM_ERRATA_773022
	cmp	r6, #0x4			@ only present up to r0p4
	mrcle	p15, 0, r0, c1, c0, 1		@ read aux control register
	orrle	r0, r0, #1 << 1			@ disable loop buffer
	mcrle	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
	b	__errata_finish

/* Cortex-A12 errata workarounds (unconditional: apply to all revisions) */
__ca12_errata:
#ifdef CONFIG_ARM_ERRATA_818325_852422
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 12		@ set bit #12
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_821420
	mrc	p15, 0, r10, c15, c0, 2		@ read internal feature reg
	orr	r10, r10, #1 << 1		@ set bit #1
	mcr	p15, 0, r10, c15, c0, 2		@ write internal feature reg
#endif
#ifdef CONFIG_ARM_ERRATA_825619
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 24		@ set bit #24
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish

/* Cortex-A17 errata workarounds */
__ca17_errata:
#ifdef CONFIG_ARM_ERRATA_852421
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 24		@ set bit #24
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_852423
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 12		@ set bit #12
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish

/* Marvell PJ4B setup: tune the implementation-defined CP15 control
 * registers before falling through into the common __v7_setup. */
__v7_pj4b_setup:
#ifdef CONFIG_CPU_PJ4B

/* Auxiliary Debug Modes Control 1 Register */
#define PJ4B_STATIC_BP (1 << 2) /* Enable Static BP */
#define PJ4B_INTER_PARITY (1 << 8) /* Disable Internal Parity Handling */
#define PJ4B_CLEAN_LINE (1 << 16) /* Disable data transfer for clean line */

/* Auxiliary Debug Modes Control 2 Register */
#define PJ4B_FAST_LDR (1 << 23) /* Disable fast LDR */
#define PJ4B_SNOOP_DATA (1 << 25) /* Do not interleave write and snoop data */
#define PJ4B_CWF (1 << 27) /* Disable Critical Word First feature */
#define PJ4B_OUTSDNG_NC (1 << 29) /* Disable outstanding non cacheable rqst */
#define PJ4B_L1_REP_RR (1 << 30) /* L1 replacement - Strict round robin */
#define PJ4B_AUX_DBG_CTRL2 (PJ4B_SNOOP_DATA | PJ4B_CWF |\
			    PJ4B_OUTSDNG_NC | PJ4B_L1_REP_RR)

/* Auxiliary Functional Modes Control Register 0 */
#define PJ4B_SMP_CFB (1 << 1) /* Set SMP mode. Join the coherency fabric */
#define PJ4B_L1_PAR_CHK (1 << 2) /* Support L1 parity checking */
#define PJ4B_BROADCAST_CACHE (1 << 8) /* Broadcast Cache and TLB maintenance */

/* Auxiliary Debug Modes Control 0 Register */
#define PJ4B_WFI_WFE (1 << 22) /* WFI/WFE - serve the DVM and back to idle */

	/* Auxiliary Debug Modes Control 1 Register */
	mrc	p15, 1,	r0, c15, c1, 1
	orr     r0, r0, #PJ4B_CLEAN_LINE
	orr     r0, r0, #PJ4B_INTER_PARITY
	bic	r0, r0, #PJ4B_STATIC_BP
	mcr	p15, 1,	r0, c15, c1, 1

	/* Auxiliary Debug Modes Control 2 Register */
	mrc	p15, 1,	r0, c15, c1, 2
	bic	r0, r0, #PJ4B_FAST_LDR
	orr	r0, r0, #PJ4B_AUX_DBG_CTRL2
	mcr	p15, 1,	r0, c15, c1, 2

	/* Auxiliary Functional Modes Control Register 0 */
	mrc	p15, 1,	r0, c15, c2, 0
#ifdef CONFIG_SMP
	orr	r0, r0, #PJ4B_SMP_CFB
#endif
	orr	r0, r0, #PJ4B_L1_PAR_CHK
	orr	r0, r0, #PJ4B_BROADCAST_CACHE
	mcr	p15, 1,	r0, c15, c2, 0

	/* Auxiliary Debug Modes Control 0 Register */
	mrc	p15, 1,	r0, c15, c1, 0
	orr	r0, r0, #PJ4B_WFI_WFE
	mcr	p15, 1,	r0, c15, c1, 0

#endif /* CONFIG_CPU_PJ4B */

/* Common entry for non-MP cores; MP cores branch to __v7_setup_cont
 * after setting r10 themselves (see __v7_ca*mp_setup above). */
__v7_setup:
	adr	r0, __v7_setup_stack_ptr
	ldr	r12, [r0]
	add	r12, r12, r0			@ the local stack
	stmia	r12, {r1-r6, lr}		@ v7_invalidate_l1 touches r0-r6
	bl      v7_invalidate_l1
	ldmia	r12, {r1-r6, lr}

__v7_setup_cont:
	and	r0, r9, #0xff000000		@ ARM?
	teq	r0, #0x41000000
	bne	__errata_finish
	and	r3, r9, #0x00f00000		@ variant
	and	r6, r9, #0x0000000f		@ revision
	orr	r6, r6, r3, lsr #20-4		@ combine variant and revision
	ubfx	r0, r9, #4, #12			@ primary part number

	/* Cortex-A8 Errata */
	ldr	r10, =0x00000c08		@ Cortex-A8 primary part number
	teq	r0, r10
	beq	__ca8_errata

	/* Cortex-A9 Errata */
	ldr	r10, =0x00000c09		@ Cortex-A9 primary part number
	teq	r0, r10
	beq	__ca9_errata

	/* Cortex-A12 Errata */
	ldr	r10, =0x00000c0d		@ Cortex-A12 primary part number
	teq	r0, r10
	beq	__ca12_errata

	/* Cortex-A17 Errata */
	ldr	r10, =0x00000c0e		@ Cortex-A17 primary part number
	teq	r0, r10
	beq	__ca17_errata

	/* Cortex-A15 Errata */
	ldr	r10, =0x00000c0f		@ Cortex-A15 primary part number
	teq	r0, r10
	beq	__ca15_errata

__errata_finish:
	mov	r10, #0
	mcr	p15, 0, r10, c7, c5, 0		@ I+BTB cache invalidate
#ifdef CONFIG_MMU
	mcr	p15, 0, r10, c8, c7, 0		@ invalidate I + D TLBs
	v7_ttb_setup r10, r4, r5, r8, r3	@ TTBCR, TTBRx setup
	ldr	r3, =PRRR			@ PRRR
	ldr	r6, =NMRR			@ NMRR
	mcr	p15, 0, r3, c10, c2, 0		@ write PRRR
	mcr	p15, 0, r6, c10, c2, 1		@ write NMRR
#endif
	dsb					@ Complete invalidations
#ifndef CONFIG_ARM_THUMBEE
	mrc	p15, 0, r0, c0, c1, 0		@ read ID_PFR0 for ThumbEE
	and	r0, r0, #(0xf << 12)		@ ThumbEE enabled field
	teq	r0, #(1 << 12)			@ check if ThumbEE is present
	bne	1f
	mov	r3, #0
	mcr	p14, 6, r3, c1, c0, 0		@ Initialize TEEHBR to 0
	mrc	p14, 6, r0, c0, c0, 0		@ load TEECR
	orr	r0, r0, #1			@ set the 1st bit in order to
	mcr	p14, 6, r0, c0, c0, 0		@ stop userspace TEEHBR access
1:
#endif
	adr	r3, v7_crval
	ldmia	r3, {r3, r6}			@ r3 = "clear" mask, r6 = "mmuset" bits
 ARM_BE8(orr	r6, r6, #1 << 25)		@ big-endian page tables
#ifdef CONFIG_SWP_EMULATE
	orr     r3, r3, #(1 << 10)              @ set SW bit in "clear"
	bic     r6, r6, #(1 << 10)              @ clear it in "mmuset"
#endif
	mrc	p15, 0, r0, c1, c0, 0		@ read control register
	bic	r0, r0, r3			@ clear the bits to be cleared
	orr	r0, r0, r6			@ set them
 THUMB(	orr	r0, r0, #1 << 30	)	@ Thumb exceptions
	ret	lr				@ return to head.S:__ret

	.align	2
__v7_setup_stack_ptr:
	.word	PHYS_RELATIVE(__v7_setup_stack, .)
ENDPROC(__v7_setup)

550	.bss
551	.align	2
552__v7_setup_stack:
553	.space	4 * 7				@ 7 registers
554
555	__INITDATA
556
557	.weak cpu_v7_bugs_init
558
559	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
560	define_processor_functions v7, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
561
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	@ generic v7 bpiall on context switch
	globl_equ	cpu_v7_bpiall_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_v7_bpiall_proc_fin,		cpu_v7_proc_fin
	globl_equ	cpu_v7_bpiall_reset,		cpu_v7_reset
	globl_equ	cpu_v7_bpiall_do_idle,		cpu_v7_do_idle
	globl_equ	cpu_v7_bpiall_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_v7_bpiall_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_v7_bpiall_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_v7_bpiall_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_v7_bpiall_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions v7_bpiall, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_bpiall_processor_functions
#else
#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_processor_functions
#endif

#ifndef CONFIG_ARM_LPAE
	@ Cortex-A8 - always needs bpiall switch_mm implementation
	globl_equ	cpu_ca8_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca8_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca8_reset,		cpu_v7_reset
	globl_equ	cpu_ca8_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca8_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_ca8_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca8_switch_mm,	cpu_v7_bpiall_switch_mm
	globl_equ	cpu_ca8_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_ca8_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca8_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions ca8, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca8_ibe

	@ Cortex-A9 - needs more registers preserved across suspend/resume
	@ and bpiall switch_mm for hardening
	globl_equ	cpu_ca9mp_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca9mp_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca9mp_reset,	cpu_v7_reset
	globl_equ	cpu_ca9mp_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca9mp_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_bpiall_switch_mm
#else
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca9mp_set_pte_ext,	cpu_v7_set_pte_ext
	define_processor_functions ca9mp, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
#endif

614	@ Cortex-A15 - needs iciallu switch_mm for hardening
615	globl_equ	cpu_ca15_proc_init,	cpu_v7_proc_init
616	globl_equ	cpu_ca15_proc_fin,	cpu_v7_proc_fin
617	globl_equ	cpu_ca15_reset,		cpu_v7_reset
618	globl_equ	cpu_ca15_do_idle,	cpu_v7_do_idle
619	globl_equ	cpu_ca15_dcache_clean_area, cpu_v7_dcache_clean_area
620#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
621	globl_equ	cpu_ca15_switch_mm,	cpu_v7_iciallu_switch_mm
622#else
623	globl_equ	cpu_ca15_switch_mm,	cpu_v7_switch_mm
624#endif
625	globl_equ	cpu_ca15_set_pte_ext,	cpu_v7_set_pte_ext
626	globl_equ	cpu_ca15_suspend_size,	cpu_v7_suspend_size
627	globl_equ	cpu_ca15_do_suspend,	cpu_v7_do_suspend
628	globl_equ	cpu_ca15_do_resume,	cpu_v7_do_resume
629	define_processor_functions ca15, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca15_ibe
630#ifdef CONFIG_CPU_PJ4B
631	define_processor_functions pj4b, dabort=v7_early_abort, pabort=v7_pabort, suspend=1
632#endif
633
634	.section ".rodata"
635
636	string	cpu_arch_name, "armv7"
637	string	cpu_elf_name, "v7"
638	.align
639
640	.section ".proc.info.init", #alloc
641
642	/*
643	 * Standard v7 proc info content
644	 */
645.macro __v7_proc name, initfunc, mm_mmuflags = 0, io_mmuflags = 0, hwcaps = 0, proc_fns = v7_processor_functions, cache_fns = v7_cache_fns
646	ALT_SMP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
647			PMD_SECT_AF | PMD_FLAGS_SMP | \mm_mmuflags)
648	ALT_UP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
649			PMD_SECT_AF | PMD_FLAGS_UP | \mm_mmuflags)
650	.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | \
651		PMD_SECT_AP_READ | PMD_SECT_AF | \io_mmuflags
652	initfn	\initfunc, \name
653	.long	cpu_arch_name
654	.long	cpu_elf_name
655	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_FAST_MULT | \
656		HWCAP_EDSP | HWCAP_TLS | \hwcaps
657	.long	cpu_v7_name
658	.long	\proc_fns
659	.long	v7wbi_tlb_fns
660	.long	v6_user_fns
661	.long	\cache_fns
662.endm
663
#ifndef CONFIG_ARM_LPAE
	/*
	 * ARM Ltd. Cortex A5 processor.
	 */
	.type   __v7_ca5mp_proc_info, #object
__v7_ca5mp_proc_info:
	.long	0x410fc050
	.long	0xff0ffff0
	__v7_proc __v7_ca5mp_proc_info, __v7_ca5mp_setup
	.size	__v7_ca5mp_proc_info, . - __v7_ca5mp_proc_info

	/*
	 * ARM Ltd. Cortex A9 processor.
	 */
	.type   __v7_ca9mp_proc_info, #object
__v7_ca9mp_proc_info:
	.long	0x410fc090
	.long	0xff0ffff0
	__v7_proc __v7_ca9mp_proc_info, __v7_ca9mp_setup, proc_fns = ca9mp_processor_functions
	.size	__v7_ca9mp_proc_info, . - __v7_ca9mp_proc_info

	/*
	 * ARM Ltd. Cortex A8 processor.
	 */
	.type	__v7_ca8_proc_info, #object
__v7_ca8_proc_info:
	.long	0x410fc080
	.long	0xff0ffff0
	__v7_proc __v7_ca8_proc_info, __v7_setup, proc_fns = ca8_processor_functions
	.size	__v7_ca8_proc_info, . - __v7_ca8_proc_info

#endif	/* CONFIG_ARM_LPAE */

697	/*
698	 * Marvell PJ4B processor.
699	 */
700#ifdef CONFIG_CPU_PJ4B
701	.type   __v7_pj4b_proc_info, #object
702__v7_pj4b_proc_info:
703	.long	0x560f5800
704	.long	0xff0fff00
705	__v7_proc __v7_pj4b_proc_info, __v7_pj4b_setup, proc_fns = pj4b_processor_functions
706	.size	__v7_pj4b_proc_info, . - __v7_pj4b_proc_info
707#endif
708
709	/*
710	 * ARM Ltd. Cortex R7 processor.
711	 */
712	.type	__v7_cr7mp_proc_info, #object
713__v7_cr7mp_proc_info:
714	.long	0x410fc170
715	.long	0xff0ffff0
716	__v7_proc __v7_cr7mp_proc_info, __v7_cr7mp_setup
717	.size	__v7_cr7mp_proc_info, . - __v7_cr7mp_proc_info
718
719	/*
720	 * ARM Ltd. Cortex R8 processor.
721	 */
722	.type	__v7_cr8mp_proc_info, #object
723__v7_cr8mp_proc_info:
724	.long	0x410fc180
725	.long	0xff0ffff0
726	__v7_proc __v7_cr8mp_proc_info, __v7_cr8mp_setup
727	.size	__v7_cr8mp_proc_info, . - __v7_cr8mp_proc_info
728
729	/*
730	 * ARM Ltd. Cortex A7 processor.
731	 */
732	.type	__v7_ca7mp_proc_info, #object
733__v7_ca7mp_proc_info:
734	.long	0x410fc070
735	.long	0xff0ffff0
736	__v7_proc __v7_ca7mp_proc_info, __v7_ca7mp_setup
737	.size	__v7_ca7mp_proc_info, . - __v7_ca7mp_proc_info
738
739	/*
740	 * ARM Ltd. Cortex A12 processor.
741	 */
742	.type	__v7_ca12mp_proc_info, #object
743__v7_ca12mp_proc_info:
744	.long	0x410fc0d0
745	.long	0xff0ffff0
746	__v7_proc __v7_ca12mp_proc_info, __v7_ca12mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
747	.size	__v7_ca12mp_proc_info, . - __v7_ca12mp_proc_info
748
749	/*
750	 * ARM Ltd. Cortex A15 processor.
751	 */
752	.type	__v7_ca15mp_proc_info, #object
753__v7_ca15mp_proc_info:
754	.long	0x410fc0f0
755	.long	0xff0ffff0
756	__v7_proc __v7_ca15mp_proc_info, __v7_ca15mp_setup, proc_fns = ca15_processor_functions
757	.size	__v7_ca15mp_proc_info, . - __v7_ca15mp_proc_info
758
759	/*
760	 * Broadcom Corporation Brahma-B15 processor.
761	 */
762	.type	__v7_b15mp_proc_info, #object
763__v7_b15mp_proc_info:
764	.long	0x420f00f0
765	.long	0xff0ffff0
766	__v7_proc __v7_b15mp_proc_info, __v7_b15mp_setup, proc_fns = ca15_processor_functions, cache_fns = b15_cache_fns
767	.size	__v7_b15mp_proc_info, . - __v7_b15mp_proc_info
768
769	/*
770	 * ARM Ltd. Cortex A17 processor.
771	 */
772	.type	__v7_ca17mp_proc_info, #object
773__v7_ca17mp_proc_info:
774	.long	0x410fc0e0
775	.long	0xff0ffff0
776	__v7_proc __v7_ca17mp_proc_info, __v7_ca17mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
777	.size	__v7_ca17mp_proc_info, . - __v7_ca17mp_proc_info
778
779	/* ARM Ltd. Cortex A73 processor */
780	.type	__v7_ca73_proc_info, #object
781__v7_ca73_proc_info:
782	.long	0x410fd090
783	.long	0xff0ffff0
784	__v7_proc __v7_ca73_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
785	.size	__v7_ca73_proc_info, . - __v7_ca73_proc_info
786
787	/* ARM Ltd. Cortex A75 processor */
788	.type	__v7_ca75_proc_info, #object
789__v7_ca75_proc_info:
790	.long	0x410fd0a0
791	.long	0xff0ffff0
792	__v7_proc __v7_ca75_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
793	.size	__v7_ca75_proc_info, . - __v7_ca75_proc_info
794
795	/*
796	 * Qualcomm Inc. Krait processors.
797	 */
798	.type	__krait_proc_info, #object
799__krait_proc_info:
800	.long	0x510f0400		@ Required ID value
801	.long	0xff0ffc00		@ Mask for ID
802	/*
803	 * Some Krait processors don't indicate support for SDIV and UDIV
804	 * instructions in the ARM instruction set, even though they actually
805	 * do support them. They also don't indicate support for fused multiply
806	 * instructions even though they actually do support them.
807	 */
808	__v7_proc __krait_proc_info, __v7_setup, hwcaps = HWCAP_IDIV | HWCAP_VFPv4
809	.size	__krait_proc_info, . - __krait_proc_info
810
811	/*
812	 * Match any ARMv7 processor core.
813	 */
814	.type	__v7_proc_info, #object
815__v7_proc_info:
816	.long	0x000f0000		@ Required ID value
817	.long	0x000f0000		@ Mask for ID
818	__v7_proc __v7_proc_info, __v7_setup
819	.size	__v7_proc_info, . - __v7_proc_info
820