/* xref: /linux/arch/arm/include/asm/vmlinux.lds.h (revision 4b132aacb0768ac1e652cf517097ea6f237214b9) */
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #include <asm-generic/vmlinux.lds.h>
3 
/*
 * CPU hotplug re-enters the CPU bring-up/tear-down code at runtime, so
 * with CONFIG_HOTPLUG_CPU the sections wrapped in ARM_CPU_KEEP() must
 * stay in the image; without it they can be dropped via ARM_CPU_DISCARD().
 * Exactly one of the two macros expands to its argument.
 */
#ifdef CONFIG_HOTPLUG_CPU
#define ARM_CPU_DISCARD(x)
#define ARM_CPU_KEEP(x)		x
#else
#define ARM_CPU_DISCARD(x)	x
#define ARM_CPU_KEEP(x)
#endif
11 
/*
 * NOTE(review): exit text/data is normally discarded at link time, but is
 * kept when SMP_ON_UP runtime patching, the generic bug table or jump
 * labels are enabled — presumably because those features emit tables that
 * may reference code in .exit.* (confirm against the features' Kconfig
 * help / commit history). DEBUG_SPINLOCK is carved out of the SMP_ON_UP
 * case.
 */
#if (defined(CONFIG_SMP_ON_UP) && !defined(CONFIG_DEBUG_SPINLOCK)) || \
	defined(CONFIG_GENERIC_BUG) || defined(CONFIG_JUMP_LABEL)
#define ARM_EXIT_KEEP(x)	x
#define ARM_EXIT_DISCARD(x)
#else
#define ARM_EXIT_KEEP(x)
#define ARM_EXIT_DISCARD(x)	x
#endif
20 
/*
 * Sections only meaningful with an MMU (see ARM_DISCARD: .text.fixup and
 * __ex_table are wrapped in ARM_MMU_DISCARD) are kept for MMU kernels
 * and discarded for !MMU builds.
 */
#ifdef CONFIG_MMU
#define ARM_MMU_KEEP(x)		x
#define ARM_MMU_DISCARD(x)
#else
#define ARM_MMU_KEEP(x)
#define ARM_MMU_DISCARD(x)	x
#endif
28 
/*
 * ld.lld does not support NOCROSSREFS:
 * https://github.com/ClangBuiltLinux/linux/issues/1609
 *
 * Define it away to nothing so the OVERLAY directive in ARM_VECTORS
 * below still parses when linking with LLD; BFD ld keeps the keyword
 * and enforces the no-cross-references check.
 */
#ifdef CONFIG_LD_IS_LLD
#define NOCROSSREFS
#endif
36 
/*
 * Set start/end symbol names to the LMA for the section.
 * Token pasting produces <sym>_start/<sym>_end pointing at the load
 * address range (LOADADDR, not the VMA) of @section — used by the
 * ARM_VECTORS overlay, whose sections share one VMA but have
 * distinct LMAs.
 */
#define ARM_LMA(sym, section)						\
	sym##_start = LOADADDR(section);				\
	sym##_end = LOADADDR(section) + SIZEOF(section)
41 
/*
 * Table of per-CPU proc_info entries, bracketed by
 * __proc_info_begin/__proc_info_end. KEEP() prevents --gc-sections from
 * dropping the entries, which are only referenced by address via the
 * bracketing symbols.
 */
#define PROC_INFO							\
		. = ALIGN(4);						\
		__proc_info_begin = .;					\
		KEEP(*(.proc.info.init))				\
		__proc_info_end = .;
47 
/*
 * Identity-mapped (.idmap.text) code, bracketed by
 * __idmap_text_start/__idmap_text_end.
 * Fix: dropped the stray trailing '\' after the last statement — it made
 * the macro definition swallow the following blank line for no reason.
 */
#define IDMAP_TEXT							\
		ALIGN_FUNCTION();					\
		__idmap_text_start = .;					\
		*(.idmap.text)						\
		__idmap_text_end = .;
/*
 * Input sections dropped from the image entirely:
 *  - .ARM.exidx/.ARM.extab for exit (and, unless hotplug, cpuexit) text:
 *    unwind entries for code that is itself discarded or never unwound;
 *  - exit text/data and exitcalls, unless a feature forces keeping them
 *    (see ARM_EXIT_KEEP above);
 *  - MMU fixup/extable sections on !MMU builds;
 *  - the architecture-independent COMMON_DISCARDS.
 */
#define ARM_DISCARD							\
		*(.ARM.exidx.exit.text)					\
		*(.ARM.extab.exit.text)					\
		*(.ARM.exidx.text.exit)					\
		*(.ARM.extab.text.exit)					\
		ARM_CPU_DISCARD(*(.ARM.exidx.cpuexit.text))		\
		ARM_CPU_DISCARD(*(.ARM.extab.cpuexit.text))		\
		ARM_EXIT_DISCARD(EXIT_TEXT)				\
		ARM_EXIT_DISCARD(EXIT_DATA)				\
		EXIT_CALL						\
		ARM_MMU_DISCARD(*(.text.fixup))				\
		ARM_MMU_DISCARD(*(__ex_table))				\
		COMMON_DISCARDS
67 
/*
 * Sections that should stay zero sized, which is safer to explicitly
 * check instead of blindly discarding.
 *
 * Any ifunc PLT/relocation content here means run-time procedure
 * linkage crept in, which the kernel cannot resolve — fail the link.
 * Fix: removed the second, duplicated *(.iplt) pattern; in GNU ld an
 * input section is placed by its first matching wildcard, so an
 * identical repeated pattern can never match anything.
 */
#define ARM_ASSERTS							\
	.plt : {							\
		*(.iplt) *(.rel.iplt) *(.igot.plt)			\
	}								\
	ASSERT(SIZEOF(.plt) == 0,					\
	       "Unexpected run-time procedure linkages detected!")
78 
/*
 * Non-allocated metadata sections: the generic ELF_DETAILS (comments,
 * debug info, …) plus the ARM build-attributes section, placed at
 * address 0 since it is never loaded.
 */
#define ARM_DETAILS							\
		ELF_DETAILS						\
		.ARM.attributes 0 : { *(.ARM.attributes) }
82 
/*
 * Toolchain-generated glue placed with the rest of .text:
 * .glue_7/.glue_7t are linker-generated ARM<->Thumb interworking
 * veneers; .vfp11_veneer and .v4_bx are linker workaround/compat
 * veneers. Fix: the .vfp11_veneer line aligned its continuation
 * backslash with spaces — normalized to tabs like its siblings.
 */
#define ARM_STUBS_TEXT							\
		*(.gnu.warning)						\
		*(.glue_7)						\
		*(.glue_7t)						\
		*(.vfp11_veneer)					\
		*(.v4_bx)
89 
/*
 * Layout of the kernel's .text payload: identity-map text first, then
 * the bracketed entry text, the generic irq/softirq entry and main text
 * groupings, the interworking stubs, the GOT, and (when CPU hotplug is
 * enabled) the proc_info table.
 */
#define ARM_TEXT							\
		IDMAP_TEXT						\
		__entry_text_start = .;					\
		*(.entry.text)						\
		__entry_text_end = .;					\
		IRQENTRY_TEXT						\
		SOFTIRQENTRY_TEXT					\
		TEXT_TEXT						\
		SCHED_TEXT						\
		LOCK_TEXT						\
		KPROBES_TEXT						\
		ARM_STUBS_TEXT						\
		. = ALIGN(4);						\
		*(.got)			/* Global offset table */	\
		ARM_CPU_KEEP(PROC_INFO)
105 
/*
 * Stack unwinding tables (ARM EHABI): the exception index
 * (.ARM.exidx*) and the exception table (.ARM.extab*), each bracketed
 * by __start_/__stop_ symbols for the unwinder. 8-byte aligned.
 */
#define ARM_UNWIND_SECTIONS						\
	. = ALIGN(8);							\
	.ARM.unwind_idx : {						\
		__start_unwind_idx = .;					\
		*(.ARM.exidx*)						\
		__stop_unwind_idx = .;					\
	}								\
	.ARM.unwind_tab : {						\
		__start_unwind_tab = .;					\
		*(.ARM.extab*)						\
		__stop_unwind_tab = .;					\
	}
119 
/*
 * The vectors and stubs are relocatable code, and the
 * only thing that matters is their relative offsets.
 *
 * All three vector flavours — plain and the two .vectors.bhb.*
 * (branch-history workaround) variants — share the same VMA
 * (0xffff0000, the high vectors page) via an OVERLAY, while each gets
 * its own consecutive LMA starting at __vectors_lma; ARM_LMA() exports
 * per-flavour <sym>_start/_end load-address symbols so the appropriate
 * copy can be installed at runtime. NOCROSSREFS (empty under LLD, see
 * above) forbids references between the overlaid flavours.
 */
#define ARM_VECTORS							\
	__vectors_lma = .;						\
	OVERLAY 0xffff0000 : NOCROSSREFS AT(__vectors_lma) {		\
		.vectors {						\
			*(.vectors)					\
		}							\
		.vectors.bhb.loop8 {					\
			*(.vectors.bhb.loop8)				\
		}							\
		.vectors.bhb.bpiall {					\
			*(.vectors.bhb.bpiall)				\
		}							\
	}								\
	ARM_LMA(__vectors, .vectors);					\
	ARM_LMA(__vectors_bhb_loop8, .vectors.bhb.loop8);		\
	ARM_LMA(__vectors_bhb_bpiall, .vectors.bhb.bpiall);		\
	/* Step '.' past the consecutive load images of all flavours */	\
	. = __vectors_lma + SIZEOF(.vectors) +				\
		SIZEOF(.vectors.bhb.loop8) +				\
		SIZEOF(.vectors.bhb.bpiall);				\
									\
	__stubs_lma = .;						\
	/* Stubs sit one page (0x1000) above the vectors' VMA */	\
	.stubs ADDR(.vectors) + 0x1000 : AT(__stubs_lma) {		\
		*(.stubs)						\
	}								\
	ARM_LMA(__stubs, .stubs);					\
	. = __stubs_lma + SIZEOF(.stubs);				\
									\
	PROVIDE(vector_fiq_offset = vector_fiq - ADDR(.vectors));
152 
/*
 * Tightly-Coupled Memory sections: code/rodata destined for ITCM and
 * data for DTCM. Each section's VMA is the TCM offset while its LMA
 * (AT(...)) stays in the normal image so the contents can be copied
 * into TCM at boot; the __s*/__e* symbols bracket each payload and the
 * final '. =' restores the location counter past the load image.
 * NOTE(review): ITCM_OFFSET/DTCM_OFFSET/LOAD_OFFSET are defined
 * elsewhere — confirm against the platform headers.
 */
#define ARM_TCM								\
	__itcm_start = ALIGN(4);					\
	.text_itcm ITCM_OFFSET : AT(__itcm_start - LOAD_OFFSET) {	\
		__sitcm_text = .;					\
		*(.tcm.text)						\
		*(.tcm.rodata)						\
		. = ALIGN(4);						\
		__eitcm_text = .;					\
	}								\
	. = __itcm_start + SIZEOF(.text_itcm);				\
									\
	__dtcm_start = .;						\
	.data_dtcm DTCM_OFFSET : AT(__dtcm_start - LOAD_OFFSET) {	\
		__sdtcm_data = .;					\
		*(.tcm.data)						\
		. = ALIGN(4);						\
		__edtcm_data = .;					\
	}								\
	. = __dtcm_start + SIZEOF(.data_dtcm);
172