diff: linux/arch/arm/boot/compressed/head.S
old: 2bcb132c693566bcb8208cc7ce66b72a4f852ecf
new: 424e5994e63326a42012f003f1174f3c363c7b62
 /*
  * linux/arch/arm/boot/compressed/head.S
  *
  * Copyright (C) 1996-2002 Russell King
  * Copyright (C) 2004 Hyok S. Choi (MPU support)
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License version 2 as
  * published by the Free Software Foundation.
  */
 #include <linux/linkage.h>
+#include <asm/assembler.h>
 
 /*
  * Debugging stuff
  *
  * Note that these macros must not contain any code which is not
  * 100% relocatable. Any attempt to do so will result in a crash.
  * Please select one of the following when turning on debugging.
  */

--- 107 unchanged lines hidden ---

 		ARM(		b	1f	)
 		THUMB(		adr	r12, BSYM(1f)	)
 		THUMB(		bx	r12	)
 
 		.word	0x016f2818		@ Magic numbers to help the loader
 		.word	start			@ absolute load/run zImage address
 		.word	_edata			@ zImage end address
 THUMB(		.thumb			)
-1:		mov	r7, r1			@ save architecture ID
+1:
+		mrs	r9, cpsr
+#ifdef CONFIG_ARM_VIRT_EXT
+		bl	__hyp_stub_install	@ get into SVC mode, reversibly
+#endif
+		mov	r7, r1			@ save architecture ID
 		mov	r8, r2			@ save atags pointer
 
 #ifndef __ARM_ARCH_2__
 		/*
 		 * Booting from Angel - need to enter SVC mode and disable
 		 * FIQs/IRQs (numeric definitions from angel arm.h source).
 		 * We only do this if we were in user mode on entry.
 		 */
 		mrs	r2, cpsr		@ get current mode
 		tst	r2, #3			@ not user?
 		bne	not_angel
 		mov	r0, #0x17		@ angel_SWIreason_EnterSVC
 ARM(		swi	0x123456	)	@ angel_SWI_ARM
 THUMB(		svc	0xab		)	@ angel_SWI_THUMB
 not_angel:
-		mrs	r2, cpsr		@ turn off interrupts to
-		orr	r2, r2, #0xc0		@ prevent angel from running
-		msr	cpsr_c, r2
+		safe_svcmode_maskall r0
+		msr	spsr_cxsf, r9		@ Save the CPU boot mode in
+						@ SPSR
 #else
 		teqp	pc, #0x0c000003		@ turn off interrupts
 #endif
 
 		/*
 		 * Note that some cache flushing and other stuff may
 		 * be needed here - is there an Angel SWI call for this?
 		 */
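Note: the added entry code snapshots the boot CPSR in r9 before anything else, installs the hypervisor stub (__hyp_stub_install) so a CPU handed over in HYP mode can drop to SVC reversibly, and then uses safe_svcmode_maskall (a macro from asm/assembler.h, which is why the new #include appears at the top of the file) to mask IRQ/FIQ and force SVC mode whatever mode the boot loader used; the original boot mode is parked in SPSR so it can be checked again just before the kernel is entered. As an illustrative sketch only (not the real macro body, and assuming the CPU is already in a privileged non-HYP mode, with the PSR_* and mode constants coming from asm/ptrace.h), safe_svcmode_maskall r0 boils down to roughly:

		mrs	r0, cpsr
		bic	r0, r0, #MODE_MASK
		orr	r0, r0, #PSR_I_BIT | PSR_F_BIT | SVC_MODE
		msr	cpsr_c, r0		@ mask IRQ/FIQ and force SVC mode

When the CPU really is in HYP mode a CPSR write cannot leave it, so the real macro instead sets up SPSR/ELR_hyp and drops to SVC via an exception return.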

--- 183 unchanged lines hidden ---

  */
 		add	r10, r10, #((reloc_code_end - restart + 256) & ~255)
 		bic	r10, r10, #255
 
 		/* Get start of code we want to copy and align it down. */
 		adr	r5, restart
 		bic	r5, r5, #31
 
+/* Relocate the hyp vector base if necessary */
+#ifdef CONFIG_ARM_VIRT_EXT
+		mrs	r0, spsr
+		and	r0, r0, #MODE_MASK
+		cmp	r0, #HYP_MODE
+		bne	1f
+
+		bl	__hyp_get_vectors
+		sub	r0, r0, r5
+		add	r0, r0, r10
+		bl	__hyp_set_vectors
+1:
+#endif
+
 		sub	r9, r6, r5		@ size to copy
 		add	r9, r9, #31		@ rounded up to a multiple
 		bic	r9, r9, #31		@ ... of 32 bytes
 		add	r6, r9, r5
 		add	r9, r9, r10
 
 1:		ldmdb	r6!, {r0 - r3, r10 - r12, lr}
 		cmp	r6, r5
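Note: the decompressor is about to copy the block between r5 and r6 up to the area starting at r10 (the ldmdb loop above), and if it was entered in HYP mode the hyp stub's vector base (HVBAR) still points into the old copy. The added block therefore rebases the vectors by the relocation delta; restating the added instructions with the arithmetic spelled out:

		bl	__hyp_get_vectors	@ r0 := current HVBAR
		sub	r0, r0, r5		@ r0 := offset of the vectors within the old copy
		add	r0, r0, r10		@ r0 := the same offset inside the relocated copy
		bl	__hyp_set_vectors	@ HVBAR := rebased vector address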

--- 92 unchanged lines hidden ---

  */
 		mov	r0, r4
 		mov	r1, sp			@ malloc space above stack
 		add	r2, sp, #0x10000	@ 64k max
 		mov	r3, r7
 		bl	decompress_kernel
 		bl	cache_clean_flush
 		bl	cache_off
-		mov	r0, #0			@ must be zero
 		mov	r1, r7			@ restore architecture number
 		mov	r2, r8			@ restore atags pointer
-ARM(		mov	pc, r4	)		@ call kernel
-THUMB(		bx	r4	)		@ entry point is always ARM
 
+#ifdef CONFIG_ARM_VIRT_EXT
+		mrs	r0, spsr		@ Get saved CPU boot mode
+		and	r0, r0, #MODE_MASK
+		cmp	r0, #HYP_MODE		@ if not booted in HYP mode...
+		bne	__enter_kernel		@ boot kernel directly
+
+		adr	r12, .L__hyp_reentry_vectors_offset
+		ldr	r0, [r12]
+		add	r0, r0, r12
+
+		bl	__hyp_set_vectors
+		__HVC(0)			@ otherwise bounce to hyp mode
+
+		b	.			@ should never be reached
+
 		.align	2
+.L__hyp_reentry_vectors_offset:	.long	__hyp_reentry_vectors - .
+#else
+		b	__enter_kernel
+#endif
+
+		.align	2
 		.type	LC0, #object
 LC0:		.word	LC0			@ r1
 		.word	__bss_start		@ r2
 		.word	_end			@ r3
 		.word	_edata			@ r6
 		.word	input_data_end - 4	@ r10 (inflated size location)
 		.word	_got_start		@ r11
 		.word	_got_end		@ ip
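Note: the final jump to the kernel is now routed through __enter_kernel (added at the end of the file). If the boot mode saved in SPSR says the loader was entered in HYP mode, the code points HVBAR at __hyp_reentry_vectors and issues __HVC(0); the trap lands in that table's hyp entry slot, which branches to __enter_kernel, so the kernel itself is entered in HYP mode. Otherwise __enter_kernel is reached directly in SVC. The vector address is rebuilt position-independently, since the image may have relocated itself; restating those added lines with comments:

		adr	r12, .L__hyp_reentry_vectors_offset	@ r12 := address of the literal
		ldr	r0, [r12]				@ r0  := __hyp_reentry_vectors minus that address
		add	r0, r0, r12				@ r0  := run-time address of __hyp_reentry_vectors
		bl	__hyp_set_vectors			@ point HVBAR at the re-entry vectors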

--- 178 unchanged lines hidden ---

 #endif
 		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
 		orr	r0, r0, #0x5000		@ I-cache enable, RR cache replacement
 		orr	r0, r0, #0x003c		@ write buffer
 #ifdef CONFIG_MMU
 #ifdef CONFIG_CPU_ENDIAN_BE8
 		orr	r0, r0, #1 << 25	@ big-endian page tables
 #endif
-		mrcne	p15, 0, r6, c2, c0, 2	@ read ttb control reg
 		orrne	r0, r0, #1		@ MMU enabled
 		movne	r1, #0xfffffffd		@ domain 0 = client
-		bic	r6, r6, #1 << 31	@ 32-bit translation system
-		bic	r6, r6, #3 << 0		@ use only ttbr0
 		mcrne	p15, 0, r3, c2, c0, 0	@ load page table pointer
 		mcrne	p15, 0, r1, c3, c0, 0	@ load domain access control
-		mcrne	p15, 0, r6, c2, c0, 2	@ load ttb control
 #endif
 		mcr	p15, 0, r0, c7, c5, 4	@ ISB
 		mcr	p15, 0, r0, c1, c0, 0	@ load control register
 		mrc	p15, 0, r0, c1, c0, 0	@ and read it back
 		mov	r0, #0
 		mcr	p15, 0, r0, c7, c5, 4	@ ISB
 		mov	pc, r12
 

--- 512 unchanged lines hidden ---

 		mov	r0, #'\n'
 		bl	putc
 		cmp	r11, #64
 		blt	2b
 		mov	pc, r10
 #endif
 
 		.ltorg
+
+#ifdef CONFIG_ARM_VIRT_EXT
+.align 5
+__hyp_reentry_vectors:
+W(b)	.			@ reset
+W(b)	.			@ undef
+W(b)	.			@ svc
+W(b)	.			@ pabort
+W(b)	.			@ dabort
+W(b)	__enter_kernel		@ hyp
+W(b)	.			@ irq
+W(b)	.			@ fiq
+#endif /* CONFIG_ARM_VIRT_EXT */
+
+__enter_kernel:
+		mov	r0, #0			@ must be 0
+ ARM(		mov	pc, r4	)		@ call kernel
+ THUMB(		bx	r4	)		@ entry point is always ARM
+
 reloc_code_end:
 
 		.align
 		.section ".stack", "aw", %nobits
 .L_user_stack:	.space	4096
 .L_user_stack_end:
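Note: __hyp_reentry_vectors is a minimal HYP vector table; the __HVC(0) issued from SVC mode above is taken through the hyp entry slot at offset 0x14, the only slot that does not spin, and execution resumes at __enter_kernel with the CPU back in HYP mode. __enter_kernel itself only zeroes r0 and jumps to the kernel at r4, always in ARM state; r1 (architecture number) and r2 (atags pointer) were already restored before the HYP check. Restating the added table with its vector offsets:

__hyp_reentry_vectors:
W(b)	.			@ 0x00 reset
W(b)	.			@ 0x04 undef
W(b)	.			@ 0x08 svc
W(b)	.			@ 0x0c pabort
W(b)	.			@ 0x10 dabort
W(b)	__enter_kernel		@ 0x14 hyp entry, taken by the HVC
W(b)	.			@ 0x18 irq
W(b)	.			@ 0x1c fiq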