/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <linux/config.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/ppc_asm.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include "ppc_defs.h"

_GLOBAL(__setup_cpu_601)
	blr
_GLOBAL(__setup_cpu_603)
	b	setup_common_caches
_GLOBAL(__setup_cpu_604)
	mflr	r4
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r4
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r4
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r4
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r4
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r4
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r4
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r4
	blr

/* Enable caches for 603's, 604, 750 & 7400 */
setup_common_caches:
	mfspr	r11,HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	HID0,r8			/* enable and invalidate caches */
	sync
	mtspr	HID0,r11		/* enable caches */
	sync
	isync
	blr

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	HID0,r8		/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	HID0,r11
	sync
	isync
	blr

/* 7400 rev <= 2.7 and 7410 rev 1.0 suffer from some errata
 * that we work around here.
 * Moto MPC7410CE.pdf describes them; they are errata
 * #3, #4 and #5 (7400 errata #13, #14 and #15).
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3, and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode, in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
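
/* Note on the MSSSR0 updates above: rlwinm with MB > ME builds a mask
 * that wraps around the register, so e.g. rlwinm r11,r11,0,9,6 keeps
 * every bit except bits 7 and 8 (big-endian bit numbering). Each
 * rlwinm/oris pair therefore clears a field and then sets its new value.
 */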

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * and Dynamic Power Management (DPM); enable speculative
 * accesses (clear SPD); clear Instruction Cache Throttling (ICTC).
 */
setup_750_7400_hid0:
	mfspr	r11,HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
BEGIN_FTR_SECTION
	oris	r11,r11,HID0_DPM@h	/* enable dynamic power mgmt */
END_FTR_SECTION_IFCLR(CPU_FTR_NO_DPM)

	/* 7400 Errata #7, don't enable SGE on < Rev 2.1 */
	mfpvr	r10
	rlwinm	r3,r10,16,16,31
	cmplwi	r3,0x000c
	bne	1f			/* Not a 7400. */
	andi.	r3,r10,0x0f0f
	cmpwi	0,r3,0x0200
	bgt	1f			/* Rev >= 2.1 */
	li	r3,HID0_SGE		/* 7400 rev < 2.1, clear SGE. */
	b	2f

1:	li	r3,0			/* Don't clear SGE. */
2:	ori	r3,r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	ICTC,r3			/* Instruction Cache Throttling off */
	isync
	mtspr	HID0,r11
	sync
	isync
	blr

/* 750cx specific
 * Looks like we have to disable the NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31
	cmpi	cr0,r10,7
	cmpi	cr1,r10,9
	cmpi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr
	lwz	r6,CPU_SPEC_FEATURES(r5)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r5)
	blr
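
/* Note on setup_750cx above: rlwinm r10,r10,4,28,31 rotates HID1 left by
 * four bits and keeps only the low nibble, i.e. it extracts HID1[0:3]
 * (which reflect the PLL configuration on the 750CX), and compares it
 * against the three PLL settings (7, 9 and 11) for which NAP is disabled.
 */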

/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * and Dynamic Power Management (DPM); enable speculative
 * accesses (clear SPD).
 * Ensure our data cache instructions really operate.
 * The timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction Cache Throttling (ICTC).
 * Enable L2 HW prefetch.
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache set up by
	 * the firmware. If there is one, we disable the NAP
	 * capability, as it's known to be bogus on rev 2.1 and earlier.
	 */
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
	lwz	r6,CPU_SPEC_FEATURES(r5)
	andi.	r0,r6,CPU_FTR_L3_DISABLE_NAP
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r5)
1:
	mfspr	r11,HID0

	/* All of the bits we have to set... */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE | HID0_BTIC | HID0_LRSTK
BEGIN_FTR_SECTION
	oris	r11,r11,HID0_DPM@h	/* enable dynamic power mgmt */
END_FTR_SECTION_IFCLR(CPU_FTR_NO_DPM)

	/* We only want to disable BTIC on the 7457 */
	mfpvr	r3
	rlwinm	r3,r3,16,16,31
	cmplwi	r3,0x8002
	li	r3,0			/* Assume we don't want to clear BTIC. */
	bne	2f			/* Not a 7457. */
	li	r3,HID0_BTIC		/* 7457, clear BTIC. */

	/* All of the bits we have to clear... */
2:	ori	r3,r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD, NOPDST and NOPTI */
	li	r3,0

	mtspr	ICTC,r3			/* Instruction Cache Throttling off */
	isync
	mtspr	HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch
	 */
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr

/* Definitions for the table used to save CPU states */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_LINE_SIZE
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_LINE_SIZE,0
	.text
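
/* The save and restore routines below identify the CPU by comparing the
 * upper halfword of the PVR against the known family values in cr0..cr5,
 * then cror the results together so that cr1 covers 7400/7410 and cr0
 * covers all 74xx parts, letting a single branch skip the family-specific
 * registers.
 */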

/* Called in normal context to back up CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. It does not save the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile; we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,PVR
	srwi	r3,r3,16
	cmpli	cr0,r3,0x8000	/* 7450 */
	cmpli	cr1,r3,0x000c	/* 7400 */
	cmpli	cr2,r3,0x800c	/* 7410 */
	cmpli	cr3,r3,0x8001	/* 7455 */
	cmpli	cr4,r3,0x8002	/* 7457 */
	cmpli	cr5,r3,0x7000	/* 750FX */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr5,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,PVR
	andi.	r3,r3,0xff00
	cmpi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore the CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile; we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,PVR
	srwi	r3,r3,16
	cmpli	cr0,r3,0x8000	/* 7450 */
	cmpli	cr1,r3,0x000c	/* 7400 */
	cmpli	cr2,r3,0x800c	/* 7410 */
	cmpli	cr3,r3,0x8001	/* 7455 */
	cmpli	cr4,r3,0x8002	/* 7457 */
	cmpli	cr5,r3,0x7000	/* 750FX */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr5,1f
	/* Restore 750FX specific registers: restore HID2 on rev 2.x,
	 * and on all revisions restore the PLL config, switching
	 * through PLL 0 first.
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,PVR
	andi.	r3,r3,0xff00
	cmpi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmpli	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr