/*
 * arch/xtensa/kernel/vmlinux.lds.S
 *
 * Xtensa linker script
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2008 Tensilica Inc.
 *
 * Chris Zankel <chris@zankel.net>
 * Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
 * Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 */

#define RO_EXCEPTION_TABLE_ALIGN	16

#include <asm-generic/vmlinux.lds.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#include <asm/core.h>
#include <asm/vectors.h>

OUTPUT_ARCH(xtensa)
ENTRY(_start)

#ifdef __XTENSA_EB__
jiffies = jiffies_64 + 4;
#else
jiffies = jiffies_64;
#endif

/* Note: In the following macros, it would be nice to specify only the
   vector name and section kind and construct "sym" and "section" using
   CPP concatenation, but that does not work reliably.  Concatenating a
   string with "." produces an invalid token.  CPP will not print a
   warning because it thinks this is an assembly file, but it leaves
   them as multiple tokens and there may or may not be whitespace
   between them. */

/* Macro for a relocation entry */

#define RELOCATE_ENTRY(sym, section)		\
	LONG(sym ## _start);			\
	LONG(sym ## _end);			\
	LONG(LOADADDR(section))

#if !defined(CONFIG_VECTORS_ADDR) && XCHAL_HAVE_VECBASE
#define MERGED_VECTORS 1
#else
#define MERGED_VECTORS 0
#endif

/*
 * Macro to define a section for a vector.  When MERGED_VECTORS is 0,
 * the code for every vector is located with the other init data.  At
 * startup time head.S copies the code for every vector to its final
 * position according to the description recorded in the corresponding
 * RELOCATE_ENTRY.
 */

#define SECTION_VECTOR4(sym, section, addr, prevsec)			      \
	section addr : AT(((LOADADDR(prevsec) + SIZEOF(prevsec)) + 3) & ~ 3) \
	{								      \
		. = ALIGN(4);						      \
		sym ## _start = ABSOLUTE(.);				      \
		*(section)						      \
		sym ## _end = ABSOLUTE(.);				      \
	}

#define SECTION_VECTOR2(section, addr)		\
	. = addr;				\
	*(section)

/*
 * Mapping of input sections to output sections when linking.
 */

SECTIONS
{
	. = KERNELOFFSET;
	/* .text section */

	_text = .;
	_stext = .;

	.text :
	{
		/* The HEAD_TEXT section must be the first section! */
		HEAD_TEXT

#if MERGED_VECTORS
		. = ALIGN(PAGE_SIZE);
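		/*
		 * Illustration only (not part of the build): with merged
		 * vectors each SECTION_VECTOR2 invocation below simply pins
		 * its input section at the vector address inside .text, e.g.
		 *
		 *	SECTION_VECTOR2 (.DebugInterruptVector.text, DEBUG_VECTOR_VADDR)
		 *
		 * expands to
		 *
		 *	. = DEBUG_VECTOR_VADDR;
		 *	*(.DebugInterruptVector.text)
		 *
		 * so no boot-time copying is needed; _vecbase below marks the
		 * base address that the VECBASE register is expected to be
		 * programmed with.
		 */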
		_vecbase = .;

#ifdef SUPPORT_WINDOWED
		SECTION_VECTOR2 (.WindowVectors.text, WINDOW_VECTORS_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 2
		SECTION_VECTOR2 (.Level2InterruptVector.text, INTLEVEL2_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 3
		SECTION_VECTOR2 (.Level3InterruptVector.text, INTLEVEL3_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 4
		SECTION_VECTOR2 (.Level4InterruptVector.text, INTLEVEL4_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 5
		SECTION_VECTOR2 (.Level5InterruptVector.text, INTLEVEL5_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 6
		SECTION_VECTOR2 (.Level6InterruptVector.text, INTLEVEL6_VECTOR_VADDR)
#endif
		SECTION_VECTOR2 (.DebugInterruptVector.text, DEBUG_VECTOR_VADDR)
		SECTION_VECTOR2 (.KernelExceptionVector.text, KERNEL_VECTOR_VADDR)
		SECTION_VECTOR2 (.UserExceptionVector.text, USER_VECTOR_VADDR)
		SECTION_VECTOR2 (.DoubleExceptionVector.text, DOUBLEEXC_VECTOR_VADDR)

		*(.exception.text)
		*(.xiptext)
#endif

		IRQENTRY_TEXT
		SOFTIRQENTRY_TEXT
		ENTRY_TEXT
		TEXT_TEXT
		SCHED_TEXT
		LOCK_TEXT
		*(.fixup)
	}
	_etext = .;
	PROVIDE (etext = .);

	. = ALIGN(16);

	RO_DATA(4096)

	/* Data section */

#ifdef CONFIG_XIP_KERNEL
	INIT_TEXT_SECTION(PAGE_SIZE)
#else
	_sdata = .;
	RW_DATA(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
	_edata = .;

	/* Initialization code and data: */

	. = ALIGN(PAGE_SIZE);
	__init_begin = .;
	INIT_TEXT_SECTION(PAGE_SIZE)

	.init.data :
	{
		INIT_DATA
	}
#endif

	.init.rodata :
	{
		. = ALIGN(0x4);
		__tagtable_begin = .;
		*(.taglist)
		__tagtable_end = .;

		. = ALIGN(16);
		__boot_reloc_table_start = ABSOLUTE(.);

#if !MERGED_VECTORS
#ifdef SUPPORT_WINDOWED
		RELOCATE_ENTRY(_WindowVectors_text,
			       .WindowVectors.text);
#endif
#if XCHAL_EXCM_LEVEL >= 2
		RELOCATE_ENTRY(_Level2InterruptVector_text,
			       .Level2InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 3
		RELOCATE_ENTRY(_Level3InterruptVector_text,
			       .Level3InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 4
		RELOCATE_ENTRY(_Level4InterruptVector_text,
			       .Level4InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 5
		RELOCATE_ENTRY(_Level5InterruptVector_text,
			       .Level5InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 6
		RELOCATE_ENTRY(_Level6InterruptVector_text,
			       .Level6InterruptVector.text);
#endif
		RELOCATE_ENTRY(_KernelExceptionVector_text,
			       .KernelExceptionVector.text);
		RELOCATE_ENTRY(_UserExceptionVector_text,
			       .UserExceptionVector.text);
		RELOCATE_ENTRY(_DoubleExceptionVector_text,
			       .DoubleExceptionVector.text);
		RELOCATE_ENTRY(_DebugInterruptVector_text,
			       .DebugInterruptVector.text);
		RELOCATE_ENTRY(_exception_text,
			       .exception.text);
#ifdef CONFIG_XIP_KERNEL
		RELOCATE_ENTRY(_xip_text, .xiptext);
#endif
#endif
#ifdef CONFIG_XIP_KERNEL
		RELOCATE_ENTRY(_xip_data, .data);
		RELOCATE_ENTRY(_xip_init_data, .init.data);
#endif
#if defined(CONFIG_SECONDARY_RESET_VECTOR)
		RELOCATE_ENTRY(_SecondaryResetVector_text,
			       .SecondaryResetVector.text);
#endif

		__boot_reloc_table_end = ABSOLUTE(.) ;

		INIT_SETUP(XCHAL_ICACHE_LINESIZE)
		INIT_CALLS
		CON_INITCALL
		INIT_RAM_FS
	}

	PERCPU_SECTION(XCHAL_ICACHE_LINESIZE)
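	/*
	 * Illustration only (not part of the build): each RELOCATE_ENTRY
	 * above emits three 32-bit words into the boot relocation table,
	 * e.g.
	 *
	 *	RELOCATE_ENTRY(_DebugInterruptVector_text,
	 *		       .DebugInterruptVector.text);
	 *
	 * becomes
	 *
	 *	LONG(_DebugInterruptVector_text_start);
	 *	LONG(_DebugInterruptVector_text_end);
	 *	LONG(LOADADDR(.DebugInterruptVector.text))
	 *
	 * i.e. destination start, destination end and source load address,
	 * which head.S walks between __boot_reloc_table_start and
	 * __boot_reloc_table_end to copy each chunk into place at startup.
	 */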
	/* We need this dummy segment here */

	. = ALIGN(4);
	.dummy : { LONG(0) }

#undef LAST
#define LAST	.dummy

#if !MERGED_VECTORS
	/* The vectors are relocated to the real position at startup time */

#ifdef SUPPORT_WINDOWED
	SECTION_VECTOR4 (_WindowVectors_text,
			 .WindowVectors.text,
			 WINDOW_VECTORS_VADDR,
			 LAST)
#undef LAST
#define LAST	.WindowVectors.text
#endif
	SECTION_VECTOR4 (_DebugInterruptVector_text,
			 .DebugInterruptVector.text,
			 DEBUG_VECTOR_VADDR,
			 LAST)
#undef LAST
#define LAST	.DebugInterruptVector.text
#if XCHAL_EXCM_LEVEL >= 2
	SECTION_VECTOR4 (_Level2InterruptVector_text,
			 .Level2InterruptVector.text,
			 INTLEVEL2_VECTOR_VADDR,
			 LAST)
# undef LAST
# define LAST	.Level2InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 3
	SECTION_VECTOR4 (_Level3InterruptVector_text,
			 .Level3InterruptVector.text,
			 INTLEVEL3_VECTOR_VADDR,
			 LAST)
# undef LAST
# define LAST	.Level3InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 4
	SECTION_VECTOR4 (_Level4InterruptVector_text,
			 .Level4InterruptVector.text,
			 INTLEVEL4_VECTOR_VADDR,
			 LAST)
# undef LAST
# define LAST	.Level4InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 5
	SECTION_VECTOR4 (_Level5InterruptVector_text,
			 .Level5InterruptVector.text,
			 INTLEVEL5_VECTOR_VADDR,
			 LAST)
# undef LAST
# define LAST	.Level5InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 6
	SECTION_VECTOR4 (_Level6InterruptVector_text,
			 .Level6InterruptVector.text,
			 INTLEVEL6_VECTOR_VADDR,
			 LAST)
# undef LAST
# define LAST	.Level6InterruptVector.text
#endif
	SECTION_VECTOR4 (_KernelExceptionVector_text,
			 .KernelExceptionVector.text,
			 KERNEL_VECTOR_VADDR,
			 LAST)
#undef LAST
	SECTION_VECTOR4 (_UserExceptionVector_text,
			 .UserExceptionVector.text,
			 USER_VECTOR_VADDR,
			 .KernelExceptionVector.text)
	SECTION_VECTOR4 (_DoubleExceptionVector_text,
			 .DoubleExceptionVector.text,
			 DOUBLEEXC_VECTOR_VADDR,
			 .UserExceptionVector.text)
#define LAST	.DoubleExceptionVector.text

#endif
#if defined(CONFIG_SECONDARY_RESET_VECTOR)

	SECTION_VECTOR4 (_SecondaryResetVector_text,
			 .SecondaryResetVector.text,
			 RESET_VECTOR1_VADDR,
			 LAST)
#undef LAST
#define LAST	.SecondaryResetVector.text

#endif
#if !MERGED_VECTORS
	SECTION_VECTOR4 (_exception_text,
			 .exception.text,
			 ,
			 LAST)
#undef LAST
#define LAST	.exception.text
	SECTION_VECTOR4 (_xip_text,
			 .xiptext,
			 ,
			 LAST)
#undef LAST
#define LAST	.xiptext
#endif
	. = (LOADADDR(LAST) + SIZEOF(LAST) + 3) & ~ 3;

	.dummy1 : AT(ADDR(.dummy1)) { LONG(0) }
	. = ALIGN(PAGE_SIZE);

#ifndef CONFIG_XIP_KERNEL
	__init_end = .;

	BSS_SECTION(0, 8192, 0)
#endif

	_end = .;

#ifdef CONFIG_XIP_KERNEL
	. = CONFIG_XIP_DATA_ADDR;
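	/*
	 * Descriptive sketch only: from here on the writable data is linked
	 * to run at CONFIG_XIP_DATA_ADDR (RAM).  Assuming the asm-generic
	 * vmlinux.lds.h macros emit AT(ADDR(section) - LOAD_OFFSET), the
	 * LOAD_OFFSET redefinition below places the load addresses of .data
	 * and .init.data directly after .dummy1 in the XIP image, and the
	 * RELOCATE_ENTRY records above (_xip_data, _xip_init_data) let
	 * head.S copy them into RAM at startup.
	 */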
	_xip_start = .;

#undef LOAD_OFFSET
#define LOAD_OFFSET \
	(CONFIG_XIP_DATA_ADDR - (LOADADDR(.dummy1) + SIZEOF(.dummy1) + 3) & ~ 3)

	_xip_data_start = .;
	_sdata = .;
	RW_DATA(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
	_edata = .;
	_xip_data_end = .;

	/* Initialization data: */

	STRUCT_ALIGN();

	_xip_init_data_start = .;
	__init_begin = .;
	.init.data :
	{
		INIT_DATA
	}
	_xip_init_data_end = .;
	__init_end = .;
	BSS_SECTION(0, 8192, 0)

	_xip_end = .;

#undef LOAD_OFFSET
#endif

	DWARF_DEBUG

	.xt.prop 0 : { KEEP(*(.xt.prop .xt.prop.* .gnu.linkonce.prop.*)) }
	.xt.insn 0 : { KEEP(*(.xt.insn .xt.insn.* .gnu.linkonce.x*)) }
	.xt.lit 0 : { KEEP(*(.xt.lit .xt.lit.* .gnu.linkonce.p*)) }

	/* Sections to be discarded */
	DISCARDS
}