/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/feature-fixups.h>

_GLOBAL(__setup_cpu_603)
	mflr	r5
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10	/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
BEGIN_FTR_SECTION
	bl	__init_fpu_registers
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_604)
	mflr	r5
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r5
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr

/* Enable caches for 603's, 604, 750 & 7400 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * errata that we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple's firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for erratum #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 */
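/* Note on the rlwinm fixes below: with a shift count of 0 and
 * MB > ME, rlwinm generates a wrap-around mask that clears bits
 * ME+1 through MB-1 (IBM numbering, bit 0 = MSB). For instance,
 * "rlwinm r11,r11,0,9,6" clears bits 7-8 of MSSSR0 before the
 * following oris sets the new field value.
 */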
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
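/* Note on the compare sequence in setup_750cx below: each cmpwi
 * targets a different CR field, and cror ORs their EQ bits together
 * (4*crN+eq is the EQ bit of field crN), so cr0.eq ends up set if
 * any of the three values matched; the bnelr returns early otherwise.
 */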
/* 750cx specific
 * Looks like we have to disable the NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31		/* isolate HID1[0:3] (PLL config) */
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr

/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache set up by
	 * the firmware. If there is one, we disable the NAP
	 * capability, as it's known to be bogus on rev 2.1 and earlier.
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andis.	r0,r6,CPU_FTR_L3_DISABLE_NAP@h
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set...
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear...
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr

/*
 * Initialize the FPU registers. This is needed to work around an erratum
 * in some 750 CPUs where using a not-yet-initialized FPU register after
 * power-on reset may hang the CPU.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr


/* Definitions for the table used to save CPU state */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text

/* Called in normal context to back up CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, so we back up the whole CR */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
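/* Note: with translation off, the storage pointer below has to be a
 * physical address, hence the (cpu_state_storage-KERNELBASE)@h for
 * the high half; the low half can be used unchanged since KERNELBASE's
 * low 16 bits are zero.
 */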
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, so we back up the whole CR */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers:
	 * restore HID2 on rev 2.x, and the PLL config (switching
	 * to PLL 0) on all revisions.
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17		/* clear bit 18: low voltage */
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14		/* clear bit 15: select PLL 0 */
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr