/*
 * Helper macros shared by the ARM processor support files.
 *
 * We need <asm/asm-offsets.h> (the generated offsets header) for:
 *  VMA_VM_MM
 *  VMA_VM_FLAGS
 *  VM_EXEC
 */
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 * \rd = destination register, \rn = vma pointer.
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * vma_vm_flags - get vma->vm_flags
 * \rd = destination register, \rn = vma pointer.
 */
	.macro	vma_vm_flags, rd, rn
	ldr	\rd, [\rn, #VMA_VM_FLAGS]
	.endm

/*
 * tsk_mm - get task->active_mm from a thread_info pointer
 * \rd = destination register, \rn = thread_info pointer.
 */
	.macro	tsk_mm, rd, rn
	ldr	\rd, [\rn, #TI_TASK]		@ \rd = thread_info->task
	ldr	\rd, [\rd, #TSK_ACTIVE_MM]	@ \rd = task->active_mm
	.endm

/*
 * act_mm - get current->active_mm
 *
 * thread_info lives at the base of the kernel stack, so it is found
 * by clearing the low THREAD_SIZE-1 (0x1fff, i.e. 8K stacks) bits of
 * sp.  ARM immediates are 8 bits with rotation, so the 0x1fff mask is
 * split across two bics: 8128 (0x1fc0) and 63 (0x3f).
 */
	.macro	act_mm, rd
	bic	\rd, sp, #8128
	bic	\rd, \rd, #63			@ \rd = current thread_info
	ldr	\rd, [\rd, #TI_TASK]		@ \rd = current task
	ldr	\rd, [\rd, #TSK_ACTIVE_MM]	@ \rd = current->active_mm
	.endm

/*
 * mmid - get context id from mm pointer (mm->context.id)
 * note, this field is 64bit, so in big-endian the two words are swapped too.
 */
	.macro	mmid, rd, rn
#ifdef __ARMEB__
	ldr	\rd, [\rn, #MM_CONTEXT_ID + 4 ]	@ low word is at offset +4 on BE
#else
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
#endif
	.endm

/*
 * asid - mask the ASID (low 8 bits) from the context ID in \rn
 */
	.macro	asid, rd, rn
	and	\rd, \rn, #255
	.endm

/*
 * crval - emit the control register clear/set value pair for this CPU.
 * With an MMU the \mmuset value is used, otherwise the \ucset
 * (uncached/non-MMU) value; \clear is common to both configurations.
 */
	.macro	crval, clear, mmuset, ucset
#ifdef CONFIG_MMU
	.word	\clear
	.word	\mmuset
#else
	.word	\clear
	.word	\ucset
#endif
	.endm

/*
 * dcache_line_size - get the minimum D-cache line size from the CTR register
 * on ARMv7.  CTR[19:16] encodes log2(words per line); line size in bytes
 * is therefore 4 << encoding.  \tmp is clobbered.
 */
	.macro	dcache_line_size, reg, tmp
	mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
	lsr	\tmp, \tmp, #16			@ move DminLine field down
	and	\tmp, \tmp, #0xf		@ cache line size encoding
	mov	\reg, #4			@ bytes per word
	mov	\reg, \reg, lsl \tmp		@ actual cache line size
	.endm

/*
 * icache_line_size - get the minimum I-cache line size from the CTR register
 * on ARMv7.  CTR[3:0] encodes log2(words per line), so no shift is needed
 * before masking.  \tmp is clobbered.
 */
	.macro	icache_line_size, reg, tmp
	mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
	and	\tmp, \tmp, #0xf		@ cache line size encoding
	mov	\reg, #4			@ bytes per word
	mov	\reg, \reg, lsl \tmp		@ actual cache line size
	.endm

/*
 * Sanity check the PTE configuration for the code below - which makes
 * certain assumptions about how these bits are laid out.
 */
#ifdef CONFIG_MMU
#if L_PTE_SHARED != PTE_EXT_SHARED
#error PTE shared bit mismatch
#endif
#if !defined (CONFIG_ARM_LPAE) && \
	(L_PTE_XN+L_PTE_USER+L_PTE_RDONLY+L_PTE_DIRTY+L_PTE_YOUNG+\
	 L_PTE_FILE+L_PTE_PRESENT) > L_PTE_SHARED
#error Invalid Linux PTE bit settings
#endif
#endif	/* CONFIG_MMU */

/*
 * The ARMv6 and ARMv7 set_pte_ext translation function.
 *
 * Permission translation:
 *  YUWD  APX AP1 AP0	SVC	User
 *  0xxx   0   0   0	no acc	no acc
 *  100x   1   0   1	r/o	no acc
 *  10x0   1   0   1	r/o	no acc
 *  1011   0   0   1	r/w	no acc
 *  110x   0   1   0	r/w	r/o
 *  11x0   0   1   0	r/w	r/o
 *  1111   0   1   1	r/w	r/w
 *
 * If !CONFIG_CPU_USE_DOMAINS, the following permissions are changed:
 *  110x   1   1   1	r/o	r/o
 *  11x0   1   1   1	r/o	r/o
 */

/*
 * armv6_mt_table - hardware memory-type attribute bits for each
 * L_PTE_MT_* value.  The table is indexed directly with the masked
 * L_PTE_MT field (entries are 4 bytes; the field's placement in the
 * PTE makes it usable as a word offset without further shifting).
 */
	.macro	armv6_mt_table pfx
\pfx\()_mt_table:
	.long	0x00						@ L_PTE_MT_UNCACHED
	.long	PTE_EXT_TEX(1)					@ L_PTE_MT_BUFFERABLE
	.long	PTE_CACHEABLE					@ L_PTE_MT_WRITETHROUGH
	.long	PTE_CACHEABLE | PTE_BUFFERABLE			@ L_PTE_MT_WRITEBACK
	.long	PTE_BUFFERABLE					@ L_PTE_MT_DEV_SHARED
	.long	0x00						@ unused
	.long	0x00						@ L_PTE_MT_MINICACHE (not present)
	.long	PTE_EXT_TEX(1) | PTE_CACHEABLE | PTE_BUFFERABLE	@ L_PTE_MT_WRITEALLOC
	.long	0x00						@ unused
	.long	PTE_EXT_TEX(1)					@ L_PTE_MT_DEV_WC
	.long	0x00						@ unused
	.long	PTE_CACHEABLE | PTE_BUFFERABLE			@ L_PTE_MT_DEV_CACHED
	.long	PTE_EXT_TEX(2)					@ L_PTE_MT_DEV_NONSHARED
	.long	0x00						@ unused
	.long	0x00						@ unused
	.long	0x00						@ unused
	.endm

/*
 * armv6_set_pte_ext - store a Linux PTE and its hardware translation.
 *
 * On entry: r0 = Linux PTE address (hardware copy lives at r0 + 2048),
 *	     r1 = Linux PTE value, r2 = extra hardware bits from the caller.
 * Clobbers r3 and ip; \pfx selects the memory-type table emitted by
 * armv6_mt_table above.
 */
	.macro	armv6_set_pte_ext pfx
	str	r1, [r0], #2048			@ linux version

	bic	r3, r1, #0x000003fc		@ clear sw bits to be re-translated
	bic	r3, r3, #PTE_TYPE_MASK
	orr	r3, r3, r2			@ merge caller-supplied ext bits
	orr	r3, r3, #PTE_EXT_AP0 | 2	@ small page type

	adr	ip, \pfx\()_mt_table
	and	r2, r1, #L_PTE_MT_MASK
	ldr	r2, [ip, r2]			@ r2 = hw memory-type bits

	eor	r1, r1, #L_PTE_DIRTY		@ invert DIRTY so the next test
	tst	r1, #L_PTE_DIRTY|L_PTE_RDONLY	@ fires for clean OR read-only
	orrne	r3, r3, #PTE_EXT_APX		@ -> write-protect (APX)

	tst	r1, #L_PTE_USER
	orrne	r3, r3, #PTE_EXT_AP1
#ifdef CONFIG_CPU_USE_DOMAINS
	@ allow kernel read/write access to read-only user pages
	tstne	r3, #PTE_EXT_APX
	bicne	r3, r3, #PTE_EXT_APX | PTE_EXT_AP0
#endif

	tst	r1, #L_PTE_XN
	orrne	r3, r3, #PTE_EXT_XN

	orr	r3, r3, r2			@ fold in memory-type bits

	tst	r1, #L_PTE_YOUNG		@ not young or not present
	tstne	r1, #L_PTE_PRESENT		@ -> zero hw entry => fault
	moveq	r3, #0
#ifndef CONFIG_CPU_USE_DOMAINS
	tstne	r1, #L_PTE_NONE			@ PROT_NONE also faults
	movne	r3, #0
#endif

	str	r3, [r0]
	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
	.endm


/*
 * The ARMv3, ARMv4 and ARMv5 set_pte_ext translation function,
 * covering most CPUs except Xscale and Xscale 3.
 *
 * Permission translation:
 *  YUWD   AP	SVC	User
 *  0xxx  0x00	no acc	no acc
 *  100x  0x00	r/o	no acc
 *  10x0  0x00	r/o	no acc
 *  1011  0x55	r/w	no acc
 *  110x  0xaa	r/w	r/o
 *  11x0  0xaa	r/w	r/o
 *  1111  0xff	r/w	r/w
 */

/*
 * armv3_set_pte_ext - as armv6_set_pte_ext, for pre-v6 hardware.
 * r0 = Linux PTE address, r1 = Linux PTE value; clobbers r2, r3.
 * \wc_disable: clear the bufferable bit on cacheable pages when the
 * D-cache is configured write-through (no write-combining there).
 */
	.macro	armv3_set_pte_ext wc_disable=1
	str	r1, [r0], #2048			@ linux version

	@ r3 = pte with PRESENT/YOUNG/DIRTY inverted: tests below check
	@ for the *absence* of those bits via NE/EQ on r3
	eor	r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

	bic	r2, r1, #PTE_SMALL_AP_MASK	@ keep C, B bits
	bic	r2, r2, #PTE_TYPE_MASK
	orr	r2, r2, #PTE_TYPE_SMALL

	tst	r3, #L_PTE_USER			@ user?
	orrne	r2, r2, #PTE_SMALL_AP_URO_SRW

	tst	r3, #L_PTE_RDONLY | L_PTE_DIRTY	@ write and dirty?
	orreq	r2, r2, #PTE_SMALL_AP_UNO_SRW

	tst	r3, #L_PTE_PRESENT | L_PTE_YOUNG	@ present and young?
	movne	r2, #0				@ no -> zero hw entry (fault)

	.if	\wc_disable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	tst	r2, #PTE_CACHEABLE
	bicne	r2, r2, #PTE_BUFFERABLE
#endif
	.endif
	str	r2, [r0]			@ hardware version
	.endm


/*
 * Xscale set_pte_ext translation, split into two halves to cope
 * with work-arounds.  r3 must be preserved by code between these
 * two macros.
 *
 * Permission translation:
 *  YUWD  AP	SVC	User
 *  0xxx  00	no acc	no acc
 *  100x  00	r/o	no acc
 *  10x0  00	r/o	no acc
 *  1011  01	r/w	no acc
 *  110x  10	r/w	r/o
 *  11x0  10	r/w	r/o
 *  1111  11	r/w	r/w
 */

/*
 * xscale_set_pte_ext_prologue - store the Linux PTE and compute the
 * hardware PTE into r2 (r0 = PTE address, r1 = PTE value).  Note r0 is
 * NOT advanced here; the epilogue stores with a #2048 pre-indexed
 * writeback instead.
 */
	.macro	xscale_set_pte_ext_prologue
	str	r1, [r0]			@ linux version

	@ invert PRESENT/YOUNG/DIRTY for the absence tests below
	eor	r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

	bic	r2, r1, #PTE_SMALL_AP_MASK	@ keep C, B bits
	orr	r2, r2, #PTE_TYPE_EXT		@ extended page

	tst	r3, #L_PTE_USER			@ user?
	orrne	r2, r2, #PTE_EXT_AP_URO_SRW	@ yes -> user r/o, system r/w

	tst	r3, #L_PTE_RDONLY | L_PTE_DIRTY	@ write and dirty?
	orreq	r2, r2, #PTE_EXT_AP_UNO_SRW	@ yes -> user n/a, system r/w
						@ combined with user -> user r/w
	.endm

/*
 * xscale_set_pte_ext_epilogue - finalise and store the hardware PTE
 * computed by the prologue, then clean/drain so the walk sees it.
 */
	.macro	xscale_set_pte_ext_epilogue
	tst	r3, #L_PTE_PRESENT | L_PTE_YOUNG	@ present and young?
	movne	r2, #0				@ no -> fault

	str	r2, [r0, #2048]!		@ hardware version
	mov	ip, #0
	mcr	p15, 0, r0, c7, c10, 1		@ clean L1 D line
	mcr	p15, 0, ip, c7, c10, 4		@ data write barrier
	.endm

/*
 * define_processor_functions - emit the processor_functions vector
 * (dabort/pabort handlers plus the cpu_<name>_* entry points) that
 * the generic ARM code dispatches through.
 * \nommu: emit 0 for set_pte_ext; \suspend: include suspend/resume slots.
 */
.macro define_processor_functions name:req, dabort:req, pabort:req, nommu=0, suspend=0
	.type	\name\()_processor_functions, #object
	.align 2
ENTRY(\name\()_processor_functions)
	.word	\dabort
	.word	\pabort
	.word	cpu_\name\()_proc_init
	.word	cpu_\name\()_proc_fin
	.word	cpu_\name\()_reset
	.word	cpu_\name\()_do_idle
	.word	cpu_\name\()_dcache_clean_area
	.word	cpu_\name\()_switch_mm

	.if \nommu
	.word	0
	.else
	.word	cpu_\name\()_set_pte_ext
	.endif

	.if \suspend
	.word	cpu_\name\()_suspend_size
#ifdef CONFIG_PM_SLEEP
	.word	cpu_\name\()_do_suspend
	.word	cpu_\name\()_do_resume
#else
	.word	0
	.word	0
#endif
	.else
	.word	0
	.word	0
	.word	0
	.endif

	.size	\name\()_processor_functions, . - \name\()_processor_functions
.endm

/*
 * define_cache_functions - emit the cpu_cache_fns vector for \name;
 * slot order must match struct cpu_cache_fns.
 */
.macro define_cache_functions name:req
	.align 2
	.type	\name\()_cache_fns, #object
ENTRY(\name\()_cache_fns)
	.long	\name\()_flush_icache_all
	.long	\name\()_flush_kern_cache_all
	.long   \name\()_flush_kern_cache_louis
	.long	\name\()_flush_user_cache_all
	.long	\name\()_flush_user_cache_range
	.long	\name\()_coherent_kern_range
	.long	\name\()_coherent_user_range
	.long	\name\()_flush_kern_dcache_area
	.long	\name\()_dma_map_area
	.long	\name\()_dma_unmap_area
	.long	\name\()_dma_flush_range
	.size	\name\()_cache_fns, . - \name\()_cache_fns
.endm

/*
 * define_tlb_functions - emit the cpu_tlb_fns vector for \name.
 * If \flags_smp is given, the TLB flags word is patched at runtime
 * via the SMP/UP alternatives (ALT_SMP/ALT_UP).
 */
.macro define_tlb_functions name:req, flags_up:req, flags_smp
	.type	\name\()_tlb_fns, #object
ENTRY(\name\()_tlb_fns)
	.long	\name\()_flush_user_tlb_range
	.long	\name\()_flush_kern_tlb_range
	.ifnb \flags_smp
		ALT_SMP(.long	\flags_smp )
		ALT_UP(.long	\flags_up )
	.else
		.long	\flags_up
	.endif
	.size	\name\()_tlb_fns, . - \name\()_tlb_fns
.endm

/*
 * globl_equ - define global symbol \x as an alias with value \y
 * (used to alias one CPU's entry points to another's).
 */
.macro globl_equ x, y
	.globl	\x
	.equ	\x, \y
.endm