/*
 *  linux/arch/arm/mm/proc-arm946.S: utility functions for ARM946E-S
 *
 *  Copyright (C) 2004-2006 Hyok S. Choi (hyok.choi@samsung.com)
 *
 *  (Much of the cache handling code is taken from proc-arm926.S)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/ptrace.h>
#include "proc-macros.S"

/*
 * The ARM946E-S can be synthesized with a D-cache of 0KB to 1MB;
 * the typical 8KB configuration comprises 256 lines of 32 bytes (8 words).
 */
#define CACHE_DSIZE	(CONFIG_CPU_DCACHE_SIZE) /* typically 8KB. */
#define CACHE_DLINESIZE	32			/* fixed */
#define CACHE_DSEGMENTS	4			/* fixed */
#define CACHE_DENTRIES	(CACHE_DSIZE / CACHE_DSEGMENTS / CACHE_DLINESIZE)
#define CACHE_DLIMIT	(CACHE_DSIZE * 4)	/* benchmark needed */
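
/*
 * Worked example, assuming the typical CONFIG_CPU_DCACHE_SIZE of 8192
 * bytes (other synthesized sizes scale accordingly):
 *
 *   CACHE_DENTRIES = 8192 / 4 / 32 = 64 lines per segment
 *   CACHE_DLIMIT   = 8192 * 4     = 32768 bytes
 *
 * Ranges of CACHE_DLIMIT bytes or more are flushed as a whole cache
 * rather than line by line (see arm946_flush_user_cache_range below).
 */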

	.text
/*
 * cpu_arm946_proc_init()
 * cpu_arm946_switch_mm()
 *
 * These are not required.
 */
ENTRY(cpu_arm946_proc_init)
ENTRY(cpu_arm946_switch_mm)
	ret	lr

/*
 * cpu_arm946_proc_fin()
 */
ENTRY(cpu_arm946_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x00001000		@ i-cache
	bic	r0, r0, #0x00000004		@ d-cache
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr

/*
 * cpu_arm946_reset(loc)
 * Params  : r0 = address to jump to
 * Notes   : This sets up everything for a reset
 */
	.pushsection	.idmap.text, "ax"
ENTRY(cpu_arm946_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x00000005		@ .............c.p
	bic	ip, ip, #0x00001000		@ i-cache
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
	ret	r0
ENDPROC(cpu_arm946_reset)
	.popsection

/*
 * cpu_arm946_do_idle()
 */
	.align	5
ENTRY(cpu_arm946_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	ret	lr

/*
 *	flush_icache_all()
 *
 *	Unconditionally clean and invalidate the entire icache.
 */
ENTRY(arm946_flush_icache_all)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	ret	lr
ENDPROC(arm946_flush_icache_all)

/*
 *	flush_user_cache_all()
 */
ENTRY(arm946_flush_user_cache_all)
	/* FALLTHROUGH */

/*
 *	flush_kern_cache_all()
 *
 *	Clean and invalidate the entire cache.
 */
ENTRY(arm946_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
#else
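	@ r1/r3 below pack the cache segment number (shifted to bit 29)
	@ together with the line index (shifted to bit 4) for the
	@ clean+invalidate-by-index operation, so the two nested loops
	@ visit every line of every segment.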
	mov	r1, #(CACHE_DSEGMENTS - 1) << 29 @ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 4 @ n entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean/flush D index
	subs	r3, r3, #1 << 4
	bcs	2b				@ entries n to 0
	subs	r1, r1, #1 << 29
	bcs	1b				@ segments 3 to 0
#endif
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	flush_user_cache_range(start, end, flags)
 *
 *	Clean and invalidate a range of cache entries in the
 *	specified address range.
 *
 *	- start	- start address (inclusive)
 *	- end	- end address (exclusive)
 *	- flags	- vm_flags describing address space
 * (same as arm926)
 */
ENTRY(arm946_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ calculate total size
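	@ For ranges of CACHE_DLIMIT bytes or more it is assumed to be
	@ cheaper to clean and invalidate the whole cache (see the
	@ "benchmark needed" note on CACHE_DLIMIT above).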
	cmp	r3, #CACHE_DLIMIT
	bhs	__flush_whole_cache

1:	tst	r2, #VM_EXEC
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#else
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#endif
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	coherent_kern_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start, end.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 */
ENTRY(arm946_coherent_kern_range)
	/* FALLTHROUGH */

/*
 *	coherent_user_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start, end.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 * (same as arm926)
 */
ENTRY(arm946_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	r0, #0
	ret	lr

/*
 *	flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure no D cache aliasing occurs, either with itself or
 *	the I cache
 *
 *	- addr	- kernel address
 *	- size	- region size
 * (same as arm926)
 */
ENTRY(arm946_flush_kern_dcache_area)
	add	r1, r0, r1
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 * (same as arm926)
 */
arm946_dma_inv_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	tst	r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
#endif
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_clean_range(start, end)
 *
 *	Clean the specified virtual address range.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 *
 * (same as arm926)
 */
arm946_dma_clean_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_flush_range(start, end)
 *
 *	Clean and invalidate the specified virtual address range.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 *
 * (same as arm926)
 */
ENTRY(arm946_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
#else
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
#endif
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_map_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(arm946_dma_map_area)
	add	r1, r1, r0
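	@ dir == DMA_TO_DEVICE:     clean only (CPU wrote, device will read)
	@ dir == DMA_FROM_DEVICE:   invalidate only (device writes, CPU will read)
	@ dir == DMA_BIDIRECTIONAL: fall through to clean+invalidate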
	cmp	r2, #DMA_TO_DEVICE
	beq	arm946_dma_clean_range
	bcs	arm946_dma_inv_range
	b	arm946_dma_flush_range
ENDPROC(arm946_dma_map_area)

/*
 *	dma_unmap_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(arm946_dma_unmap_area)
	ret	lr
ENDPROC(arm946_dma_unmap_area)

	.globl	arm946_flush_kern_cache_louis
	.equ	arm946_flush_kern_cache_louis, arm946_flush_kern_cache_all
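	@ With only a single cache level there is no distinct Level of
	@ Unification, so the LoUIS variant is simply an alias for the
	@ full clean+invalidate above.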

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions arm946

ENTRY(cpu_arm946_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

	.type	__arm946_setup, #function
__arm946_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c6, 0		@ invalidate D cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB

	mcr	p15, 0, r0, c6, c3, 0		@ disable memory regions 3-7
	mcr	p15, 0, r0, c6, c4, 0
	mcr	p15, 0, r0, c6, c5, 0
	mcr	p15, 0, r0, c6, c6, 0
	mcr	p15, 0, r0, c6, c7, 0

	mov	r0, #0x0000003F			@ base = 0, size = 4GB
	mcr	p15, 0, r0, c6,	c0, 0		@ set region 0, default
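	@ 0x3F = enable bit (bit 0) plus a size field of 0b11111 (4GB) in
	@ bits [5:1]; the base field (bits [31:12]) is zero, so region 0
	@ is a default background mapping covering the whole address space.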

	ldr	r0, =(CONFIG_DRAM_BASE & 0xFFFFF000) @ base[31:12] of RAM
	ldr	r7, =CONFIG_DRAM_SIZE		@ size of RAM (must be >= 4KB)
	pr_val	r3, r0, r7, #1
	mcr	p15, 0, r3, c6, c1, 0

	ldr	r0, =(CONFIG_FLASH_MEM_BASE & 0xFFFFF000) @ base[31:12] of FLASH
	ldr	r7, =CONFIG_FLASH_SIZE		@ size of FLASH (must be >= 4KB)
	pr_val	r3, r0, r7, #1
	mcr	p15, 0, r3, c6, c2, 0

	mov	r0, #0x06
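	@ Cacheability is selected with one bit per protection region:
	@ 0x06 enables caching for regions 1 (RAM) and 2 (FLASH) only.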
	mcr	p15, 0, r0, c2, c0, 0		@ region 1,2 d-cacheable
	mcr	p15, 0, r0, c2, c0, 1		@ region 1,2 i-cacheable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mov	r0, #0x00			@ disable whole write buffer
#else
	mov	r0, #0x02			@ region 1 write buffered
#endif
	mcr	p15, 0, r0, c3, c0, 0

/*
 *  Access Permission Settings for future permission control by PU.
 *
 *				priv.	user
 * 	region 0 (whole)	rw	--	: b0001
 * 	region 1 (RAM)		rw	rw	: b0011
 * 	region 2 (FLASH)	rw	r-	: b0010
 *	regions 3-7 (none)	--	--	: b0000
 */
	mov	r0, #0x00000031
	orr	r0, r0, #0x00000200
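	@ r0 = 0x231: one 4-bit permission field per region, matching the
	@ table above (region 0 = b0001, region 1 = b0011, region 2 = b0010).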
	mcr	p15, 0, r0, c5, c0, 2		@ set data access permission
	mcr	p15, 0, r0, c5, c0, 3		@ set inst. access permission

	mrc	p15, 0, r0, c1, c0		@ get control register
	orr	r0, r0, #0x00001000		@ I-cache
	orr	r0, r0, #0x00000005		@ MPU/D-cache
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
	orr	r0, r0, #0x00004000		@ .1.. .... .... ....
#endif
	ret	lr

	.size	__arm946_setup, . - __arm946_setup

	__INITDATA

	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions arm946, dabort=nommu_early_abort, pabort=legacy_pabort, nommu=1

	.section ".rodata"

	string	cpu_arch_name, "armv5te"
	string	cpu_elf_name, "v5t"
	string	cpu_arm946_name, "ARM946E-S"

	.align

	.section ".proc.info.init", #alloc
	.type	__arm946_proc_info,#object
__arm946_proc_info:
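	@ Field order follows struct proc_info_list (<asm/procinfo.h>):
	@ cpu_val, cpu_mask, MMU section/IO flags (zero here, no MMU),
	@ the init function, arch and ELF name strings, hwcaps, CPU name,
	@ then the processor, TLB, user and cache function tables (TLB and
	@ user are unused on this nommu CPU, hence the zero entries).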
	.long	0x41009460
	.long	0xff00fff0
	.long	0
	.long	0
	initfn	__arm946_setup, __arm946_proc_info
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm946_name
	.long	arm946_processor_functions
	.long	0
	.long	0
	.long	arm946_cache_fns
	.size	__arm946_proc_info, . - __arm946_proc_info