/*
 * arch/xtensa/kernel/vmlinux.lds.S
 *
 * Xtensa linker script
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2008 Tensilica Inc.
 *
 * Chris Zankel <chris@zankel.net>
 * Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
 * Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 */

#include <asm-generic/vmlinux.lds.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#include <asm/core.h>
#include <asm/vectors.h>

OUTPUT_ARCH(xtensa)
ENTRY(_start)

/* jiffies aliases the low 32 bits of jiffies_64; on big-endian cores that
   word sits at offset 4. */
#ifdef __XTENSA_EB__
jiffies = jiffies_64 + 4;
#else
jiffies = jiffies_64;
#endif

/* Note: In the following macros, it would be nice to specify only the
   vector name and section kind and construct "sym" and "section" using
   CPP concatenation, but that does not work reliably. Concatenating a
   string with "." produces an invalid token. CPP will not print a
   warning because it thinks this is an assembly file, but it leaves
   them as multiple tokens and there may or may not be whitespace
   between them. */

/* Macro for a relocation entry */

#define RELOCATE_ENTRY(sym, section)		\
	LONG(sym ## _start);			\
	LONG(sym ## _end);			\
	LONG(LOADADDR(section))

/*
 * Macro to define a section for a vector. When CONFIG_VECTORS_OFFSET is
 * defined, code for every vector is located with other init data. At startup
 * time head.S copies code for every vector to its final position according
 * to the description recorded in the corresponding RELOCATE_ENTRY.
 */

#ifdef CONFIG_VECTORS_OFFSET
#define SECTION_VECTOR(sym, section, addr, prevsec)			      \
	section addr : AT(((LOADADDR(prevsec) + SIZEOF(prevsec)) + 3) & ~ 3) \
	{								      \
		. = ALIGN(4);						      \
		sym ## _start = ABSOLUTE(.);				      \
		*(section)						      \
		sym ## _end = ABSOLUTE(.);				      \
	}
#else
#define SECTION_VECTOR(section, addr)	\
	. = addr;			\
	*(section)
#endif
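
/*
 * Illustrative example, not part of the generated link map: with
 * CONFIG_VECTORS_OFFSET defined, the invocation used further below
 *
 *	SECTION_VECTOR (_DebugInterruptVector_text,
 *			.DebugInterruptVector.text,
 *			DEBUG_VECTOR_VADDR,
 *			.WindowVectors.text)
 *
 * expands to an output section that is linked at DEBUG_VECTOR_VADDR but
 * loaded right behind .WindowVectors.text:
 *
 *	.DebugInterruptVector.text DEBUG_VECTOR_VADDR :
 *		AT(((LOADADDR(.WindowVectors.text) +
 *		     SIZEOF(.WindowVectors.text)) + 3) & ~ 3)
 *	{
 *		. = ALIGN(4);
 *		_DebugInterruptVector_text_start = ABSOLUTE(.);
 *		*(.DebugInterruptVector.text)
 *		_DebugInterruptVector_text_end = ABSOLUTE(.);
 *	}
 *
 * The matching RELOCATE_ENTRY(_DebugInterruptVector_text,
 * .DebugInterruptVector.text) emits the three words
 *
 *	LONG(_DebugInterruptVector_text_start);
 *	LONG(_DebugInterruptVector_text_end);
 *	LONG(LOADADDR(.DebugInterruptVector.text))
 *
 * into the __boot_reloc_table_start..__boot_reloc_table_end table that
 * head.S walks to copy each vector to its run-time address.
 */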

/*
 * Mapping of input sections to output sections when linking.
 */

SECTIONS
{
	. = KERNELOFFSET;
	/* .text section */

	_text = .;
	_stext = .;

	.text :
	{
		/* The HEAD_TEXT section must be the first section! */
		HEAD_TEXT

#ifndef CONFIG_VECTORS_OFFSET
		. = ALIGN(PAGE_SIZE);
		_vecbase = .;

		SECTION_VECTOR (.WindowVectors.text, WINDOW_VECTORS_VADDR)
#if XCHAL_EXCM_LEVEL >= 2
		SECTION_VECTOR (.Level2InterruptVector.text, INTLEVEL2_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 3
		SECTION_VECTOR (.Level3InterruptVector.text, INTLEVEL3_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 4
		SECTION_VECTOR (.Level4InterruptVector.text, INTLEVEL4_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 5
		SECTION_VECTOR (.Level5InterruptVector.text, INTLEVEL5_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 6
		SECTION_VECTOR (.Level6InterruptVector.text, INTLEVEL6_VECTOR_VADDR)
#endif
		SECTION_VECTOR (.DebugInterruptVector.text, DEBUG_VECTOR_VADDR)
		SECTION_VECTOR (.KernelExceptionVector.text, KERNEL_VECTOR_VADDR)
		SECTION_VECTOR (.UserExceptionVector.text, USER_VECTOR_VADDR)
		SECTION_VECTOR (.DoubleExceptionVector.text, DOUBLEEXC_VECTOR_VADDR)
#endif

		IRQENTRY_TEXT
		SOFTIRQENTRY_TEXT
		ENTRY_TEXT
		TEXT_TEXT
		SCHED_TEXT
		CPUIDLE_TEXT
		LOCK_TEXT

	}
	_etext = .;
	PROVIDE (etext = .);

	. = ALIGN(16);

	RODATA

	/* Relocation table */

	.fixup : { *(.fixup) }

	EXCEPTION_TABLE(16)
	NOTES
	/* Data section */

#ifdef CONFIG_XIP_KERNEL
	INIT_TEXT_SECTION(PAGE_SIZE)
#else
	_sdata = .;
	RW_DATA_SECTION(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
	_edata = .;

	/* Initialization code and data: */

	. = ALIGN(PAGE_SIZE);
	__init_begin = .;
	INIT_TEXT_SECTION(PAGE_SIZE)

	.init.data :
	{
		INIT_DATA
	}
#endif

	.init.rodata :
	{
		. = ALIGN(0x4);
		__tagtable_begin = .;
		*(.taglist)
		__tagtable_end = .;

		. = ALIGN(16);
		__boot_reloc_table_start = ABSOLUTE(.);

#ifdef CONFIG_VECTORS_OFFSET
		RELOCATE_ENTRY(_WindowVectors_text,
			       .WindowVectors.text);
#if XCHAL_EXCM_LEVEL >= 2
		RELOCATE_ENTRY(_Level2InterruptVector_text,
			       .Level2InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 3
		RELOCATE_ENTRY(_Level3InterruptVector_text,
			       .Level3InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 4
		RELOCATE_ENTRY(_Level4InterruptVector_text,
			       .Level4InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 5
		RELOCATE_ENTRY(_Level5InterruptVector_text,
			       .Level5InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 6
		RELOCATE_ENTRY(_Level6InterruptVector_text,
			       .Level6InterruptVector.text);
#endif
		RELOCATE_ENTRY(_KernelExceptionVector_text,
			       .KernelExceptionVector.text);
		RELOCATE_ENTRY(_UserExceptionVector_text,
			       .UserExceptionVector.text);
		RELOCATE_ENTRY(_DoubleExceptionVector_text,
			       .DoubleExceptionVector.text);
		RELOCATE_ENTRY(_DebugInterruptVector_text,
			       .DebugInterruptVector.text);
#endif
#ifdef CONFIG_XIP_KERNEL
		RELOCATE_ENTRY(_xip_data, .data);
		RELOCATE_ENTRY(_xip_init_data, .init.data);
#else
#if defined(CONFIG_SMP)
		RELOCATE_ENTRY(_SecondaryResetVector_text,
			       .SecondaryResetVector.text);
#endif
#endif

		__boot_reloc_table_end = ABSOLUTE(.);

		INIT_SETUP(XCHAL_ICACHE_LINESIZE)
		INIT_CALLS
		CON_INITCALL
		INIT_RAM_FS
	}

	PERCPU_SECTION(XCHAL_ICACHE_LINESIZE)

	/*
	 * We need this dummy segment here: the first relocated vector
	 * section and the XIP LOAD_OFFSET below take their load addresses
	 * relative to it.
	 */

	. = ALIGN(4);
	.dummy : { LONG(0) }
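
/*
 * Illustrative note, assuming CONFIG_VECTORS_OFFSET: the vector sections
 * defined below are linked at their architectural addresses but are stored
 * in the image packed one after another, starting right behind .dummy:
 *
 *	LOADADDR(.WindowVectors.text) ==
 *		(LOADADDR(.dummy) + SIZEOF(.dummy) + 3) & ~3
 *	LOADADDR(.DebugInterruptVector.text) ==
 *		(LOADADDR(.WindowVectors.text) +
 *		 SIZEOF(.WindowVectors.text) + 3) & ~3
 *	...
 *
 * The LAST define/undef pairs keep this chain intact when some of the
 * optional medium-priority interrupt vectors (XCHAL_EXCM_LEVEL) are absent.
 */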

#ifdef CONFIG_VECTORS_OFFSET
	/* The vectors are relocated to the real position at startup time */

	SECTION_VECTOR (_WindowVectors_text,
			.WindowVectors.text,
			WINDOW_VECTORS_VADDR,
			.dummy)
	SECTION_VECTOR (_DebugInterruptVector_text,
			.DebugInterruptVector.text,
			DEBUG_VECTOR_VADDR,
			.WindowVectors.text)
#undef LAST
#define LAST	.DebugInterruptVector.text
#if XCHAL_EXCM_LEVEL >= 2
	SECTION_VECTOR (_Level2InterruptVector_text,
			.Level2InterruptVector.text,
			INTLEVEL2_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level2InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 3
	SECTION_VECTOR (_Level3InterruptVector_text,
			.Level3InterruptVector.text,
			INTLEVEL3_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level3InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 4
	SECTION_VECTOR (_Level4InterruptVector_text,
			.Level4InterruptVector.text,
			INTLEVEL4_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level4InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 5
	SECTION_VECTOR (_Level5InterruptVector_text,
			.Level5InterruptVector.text,
			INTLEVEL5_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level5InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 6
	SECTION_VECTOR (_Level6InterruptVector_text,
			.Level6InterruptVector.text,
			INTLEVEL6_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level6InterruptVector.text
#endif
	SECTION_VECTOR (_KernelExceptionVector_text,
			.KernelExceptionVector.text,
			KERNEL_VECTOR_VADDR,
			LAST)
#undef LAST
	SECTION_VECTOR (_UserExceptionVector_text,
			.UserExceptionVector.text,
			USER_VECTOR_VADDR,
			.KernelExceptionVector.text)
	SECTION_VECTOR (_DoubleExceptionVector_text,
			.DoubleExceptionVector.text,
			DOUBLEEXC_VECTOR_VADDR,
			.UserExceptionVector.text)

	. = (LOADADDR( .DoubleExceptionVector.text ) + SIZEOF( .DoubleExceptionVector.text ) + 3) & ~ 3;

#endif
#if !defined(CONFIG_XIP_KERNEL) && defined(CONFIG_SMP)

	SECTION_VECTOR (_SecondaryResetVector_text,
			.SecondaryResetVector.text,
			RESET_VECTOR1_VADDR,
			.DoubleExceptionVector.text)

	. = LOADADDR(.SecondaryResetVector.text) + SIZEOF(.SecondaryResetVector.text);

#endif

	. = ALIGN(PAGE_SIZE);

#ifndef CONFIG_XIP_KERNEL
	__init_end = .;

	BSS_SECTION(0, 8192, 0)
#endif

	_end = .;

#ifdef CONFIG_XIP_KERNEL
	. = CONFIG_XIP_DATA_ADDR;

	_xip_start = .;

#undef LOAD_OFFSET
#define LOAD_OFFSET \
	(CONFIG_XIP_DATA_ADDR - (LOADADDR(.dummy) + SIZEOF(.dummy) + 3) & ~ 3)

	_xip_data_start = .;
	_sdata = .;
	RW_DATA_SECTION(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
	_edata = .;
	_xip_data_end = .;

	/* Initialization data: */

	STRUCT_ALIGN();

	_xip_init_data_start = .;
	__init_begin = .;
	.init.data :
	{
		INIT_DATA
	}
	_xip_init_data_end = .;
	__init_end = .;
	BSS_SECTION(0, 8192, 0)

	_xip_end = .;

#undef LOAD_OFFSET
#endif

	DWARF_DEBUG

	.xt.prop 0 : { KEEP(*(.xt.prop .xt.prop.* .gnu.linkonce.prop.*)) }
	.xt.insn 0 : { KEEP(*(.xt.insn .xt.insn.* .gnu.linkonce.x*)) }
	.xt.lit 0 : { KEEP(*(.xt.lit .xt.lit.* .gnu.linkonce.p*)) }

	/* Sections to be discarded */
	DISCARDS
}