/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * linux/arch/arm/mm/arm946.S: utility functions for ARM946E-S
 *
 * Copyright (C) 2004-2006 Hyok S. Choi (hyok.choi@samsung.com)
 *
 * (Many of cache codes are from proc-arm926.S)
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/cfi_types.h>
#include <linux/pgtable.h>
#include <asm/assembler.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include "proc-macros.S"

/*
 * ARM946E-S is synthesizable to have 0KB to 1MB sized D-Cache,
 * comprising 256 lines of 32 bytes (8 words).
 *
 * CACHE_DLIMIT is the range size above which whole-cache maintenance
 * is cheaper than per-line maintenance (see flush_user_cache_range).
 */
#define CACHE_DSIZE	(CONFIG_CPU_DCACHE_SIZE) /* typically 8KB. */
#define CACHE_DLINESIZE	32			/* fixed */
#define CACHE_DSEGMENTS	4			/* fixed */
#define CACHE_DENTRIES	(CACHE_DSIZE / CACHE_DSEGMENTS / CACHE_DLINESIZE)
#define CACHE_DLIMIT	(CACHE_DSIZE * 4)	/* benchmark needed */

	.text
/*
 * cpu_arm946_proc_init()
 * cpu_arm946_switch_mm()
 *
 * These are not required.
 */
SYM_TYPED_FUNC_START(cpu_arm946_proc_init)
	ret	lr
SYM_FUNC_END(cpu_arm946_proc_init)

SYM_TYPED_FUNC_START(cpu_arm946_switch_mm)
	ret	lr				@ no MMU: nothing to switch
SYM_FUNC_END(cpu_arm946_switch_mm)

/*
 * cpu_arm946_proc_fin()
 *
 * Prepare the CPU for shutdown/reboot: clear the I-cache and D-cache
 * enable bits in the CP15 control register so the core runs uncached.
 */
SYM_TYPED_FUNC_START(cpu_arm946_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x00001000		@ i-cache
	bic	r0, r0, #0x00000004		@ d-cache
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr
SYM_FUNC_END(cpu_arm946_proc_fin)

/*
 * cpu_arm946_reset(loc)
 * Params  : r0 = address to jump to
 * Notes   : This sets up everything for a reset
 *
 * Placed in .idmap.text so the branch to r0 stays valid while the
 * caches and protection unit are being turned off.
 */
	.pushsection	.idmap.text, "ax"
SYM_TYPED_FUNC_START(cpu_arm946_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x00000005		@ .............c.p
	bic	ip, ip, #0x00001000		@ i-cache
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
	ret	r0
SYM_FUNC_END(cpu_arm946_reset)
	.popsection

/*
 * cpu_arm946_do_idle()
 *
 * Enter low-power state until the next interrupt.
 */
	.align	5
SYM_TYPED_FUNC_START(cpu_arm946_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	ret	lr
SYM_FUNC_END(cpu_arm946_do_idle)

/*
 * flush_icache_all()
 *
 * Unconditionally clean and invalidate the entire icache.
 */
SYM_TYPED_FUNC_START(arm946_flush_icache_all)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	ret	lr
SYM_FUNC_END(arm946_flush_icache_all)

/*
 * flush_user_cache_all()
 *
 * With no MMU there is no per-address-space cache state, so this is
 * the same operation as flushing the whole kernel cache.
 */
SYM_FUNC_ALIAS(arm946_flush_user_cache_all, arm946_flush_kern_cache_all)

/*
 * flush_kern_cache_all()
 *
 * Clean and invalidate the entire cache.
 */
SYM_TYPED_FUNC_START(arm946_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
#else
	/* Index-walk every line: segment in bits [31:29], entry in [N:4] */
	mov	r1, #(CACHE_DSEGMENTS - 1) << 29 @ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 4 @ n entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean/flush D index
	subs	r3, r3, #1 << 4
	bcs	2b				@ entries n to 0
	subs	r1, r1, #1 << 29
	bcs	1b				@ segments 3 to 0
#endif
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm946_flush_kern_cache_all)

/*
 * flush_user_cache_range(start, end, flags)
 *
 * Clean and invalidate a range of cache entries in the
 * specified address range.
 *
 * - start - start address (inclusive)
 * - end   - end address (exclusive)
 * - flags - vm_flags describing address space
 * (same as arm926)
 */
SYM_TYPED_FUNC_START(arm946_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ calculate total size
	cmp	r3, #CACHE_DLIMIT		@ large ranges: whole-cache
	bhs	__flush_whole_cache		@ flush is cheaper

	/* Loop is unrolled two cache lines per iteration */
1:	tst	r2, #VM_EXEC
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#else
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#endif
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm946_flush_user_cache_range)

/*
 * coherent_kern_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start - virtual start address
 * - end   - virtual end address
 */
SYM_TYPED_FUNC_START(arm946_coherent_kern_range)
#ifdef CONFIG_CFI_CLANG /* Fallthrough if !CFI */
	b	arm946_coherent_user_range
#endif
SYM_FUNC_END(arm946_coherent_kern_range)

/*
 * coherent_user_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start - virtual start address
 * - end   - virtual end address
 * (same as arm926)
 */
SYM_TYPED_FUNC_START(arm946_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align down to a cache line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	r0, #0				@ return success
	ret	lr
SYM_FUNC_END(arm946_coherent_user_range)

/*
 * flush_kern_dcache_area(void *addr, size_t size)
 *
 * Ensure no D cache aliasing occurs, either with itself or
 * the I cache
 *
 * - addr - kernel address
 * - size - region size
 * (same as arm926)
 */
SYM_TYPED_FUNC_START(arm946_flush_kern_dcache_area)
	add	r1, r0, r1			@ r1 = end address
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm946_flush_kern_dcache_area)

/*
 * dma_inv_range(start, end)
 *
 * Invalidate (discard) the specified virtual address range.
 * May not write back any entries.
 * If 'start' or 'end' are not cache line aligned, those lines
 * must be written back.
 *
 * - start - virtual start address
 * - end   - virtual end address
 * (same as arm926)
 */
arm946_dma_inv_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	/* Partial lines at either boundary must be cleaned first so no
	 * adjacent (non-DMA) data is lost when the line is discarded. */
	tst	r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
#endif
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

/*
 * dma_clean_range(start, end)
 *
 * Clean the specified virtual address range.
 *
 * - start - virtual start address
 * - end   - virtual end address
 *
 * (same as arm926)
 */
arm946_dma_clean_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	/* With a write-through D-cache there is nothing to clean;
	 * only the write buffer drain below is needed. */
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr

/*
 * dma_flush_range(start, end)
 *
 * Clean and invalidate the specified virtual address range.
 *
 * - start - virtual start address
 * - end   - virtual end address
 *
 * (same as arm926)
 */
SYM_TYPED_FUNC_START(arm946_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
#else
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
#endif
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm946_dma_flush_range)

/*
 * dma_map_area(start, size, dir)
 * - start - kernel virtual start address
 * - size  - size of region
 * - dir   - DMA direction
 *
 * Dispatches on dir: TO_DEVICE -> clean, FROM_DEVICE -> invalidate,
 * BIDIRECTIONAL -> clean+invalidate (tail-calls, lr unchanged).
 */
SYM_TYPED_FUNC_START(arm946_dma_map_area)
	add	r1, r1, r0			@ r1 = end address
	cmp	r2, #DMA_TO_DEVICE
	beq	arm946_dma_clean_range
	bcs	arm946_dma_inv_range
	b	arm946_dma_flush_range
SYM_FUNC_END(arm946_dma_map_area)

/*
 * dma_unmap_area(start, size, dir)
 * - start - kernel virtual start address
 * - size  - size of region
 * - dir   - DMA direction
 *
 * All maintenance is done at map time on this CPU; nothing to do here.
 */
SYM_TYPED_FUNC_START(arm946_dma_unmap_area)
	ret	lr
SYM_FUNC_END(arm946_dma_unmap_area)

/*
 * cpu_arm946_dcache_clean_area(addr, size)
 *
 * Clean (write back) r1 bytes of D-cache starting at r0.
 */
SYM_TYPED_FUNC_START(cpu_arm946_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(cpu_arm946_dcache_clean_area)

/*
 * __arm946_setup
 *
 * Boot-time CPU initialisation: invalidate caches, program the
 * protection-unit regions and return (in r0) the value to be written
 * to the CP15 control register by the caller.
 */
	.type	__arm946_setup, #function
__arm946_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c6, 0		@ invalidate D cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB

	mcr	p15, 0, r0, c6, c3, 0		@ disable memory region 3~7
	mcr	p15, 0, r0, c6, c4, 0
	mcr	p15, 0, r0, c6, c5, 0
	mcr	p15, 0, r0, c6, c6, 0
	mcr	p15, 0, r0, c6, c7, 0

	mov	r0, #0x0000003F			@ base = 0, size = 4GB
	mcr	p15, 0, r0, c6, c0, 0		@ set region 0, default

	ldr	r0, =(CONFIG_DRAM_BASE & 0xFFFFF000) @ base[31:12] of RAM
	ldr	r7, =CONFIG_DRAM_SIZE		@ size of RAM (must be >= 4KB)
	pr_val	r3, r0, r7, #1			@ build region register value
	mcr	p15, 0, r3, c6, c1, 0		@ set region 1 (RAM)

	ldr	r0, =(CONFIG_FLASH_MEM_BASE & 0xFFFFF000) @ base[31:12] of FLASH
	ldr	r7, =CONFIG_FLASH_SIZE		@ size of FLASH (must be >= 4KB)
	pr_val	r3, r0, r7, #1			@ build region register value
	mcr	p15, 0, r3, c6, c2, 0		@ set region 2 (FLASH)

	mov	r0, #0x06
	mcr	p15, 0, r0, c2, c0, 0		@ region 1,2 d-cacheable
	mcr	p15, 0, r0, c2, c0, 1		@ region 1,2 i-cacheable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mov	r0, #0x00			@ disable whole write buffer
#else
	mov	r0, #0x02			@ region 1 write bufferred
#endif
	mcr	p15, 0, r0, c3, c0, 0

/*
 * Access Permission Settings for future permission control by PU.
 *
 *				priv.	user
 *	region 0 (whole)	rw	--	: b0001
 *	region 1 (RAM)		rw	rw	: b0011
 *	region 2 (FLASH)	rw	r-	: b0010
 *	region 3~7 (none)	--	--	: b0000
 */
	mov	r0, #0x00000031			@ regions 0,1 permissions
	orr	r0, r0, #0x00000200		@ region 2 permission
	mcr	p15, 0, r0, c5, c0, 2		@ set data access permission
	mcr	p15, 0, r0, c5, c0, 3		@ set inst. access permission

	/* Compose the control register value returned to the caller */
	mrc	p15, 0, r0, c1, c0		@ get control register
	orr	r0, r0, #0x00001000		@ I-cache
	orr	r0, r0, #0x00000005		@ MPU/D-cache
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
	orr	r0, r0, #0x00004000		@ .1.. .... .... ....
#endif
	ret	lr

	.size	__arm946_setup, . - __arm946_setup
	__INITDATA

	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions arm946, dabort=nommu_early_abort, pabort=legacy_pabort, nommu=1

	.section ".rodata"

	string	cpu_arch_name, "armv5te"
	string	cpu_elf_name, "v5t"
	string	cpu_arm946_name, "ARM946E-S"

	.align

	/*
	 * CPU match/description record consumed by the boot-time CPU
	 * detection code; layout must match struct proc_info_list.
	 */
	.section ".proc.info.init", "a"
	.type	__arm946_proc_info,#object
__arm946_proc_info:
	.long	0x41009460			@ CPU ID value
	.long	0xff00fff0			@ CPU ID mask
	.long	0
	.long	0
	initfn	__arm946_setup, __arm946_proc_info
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm946_name
	.long	arm946_processor_functions
	.long	0
	.long	0
	.long	arm946_cache_fns
	.size	__arm946_proc_info, . - __arm946_proc_info