/* cpu_setup_6xx.S */
/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */
  11. #include <asm/processor.h>
  12. #include <asm/page.h>
  13. #include <asm/cputable.h>
  14. #include <asm/ppc_asm.h>
  15. #include <asm/asm-offsets.h>
  16. #include <asm/cache.h>
  17. #include <asm/mmu.h>
  18. _GLOBAL(__setup_cpu_603)
  19. mflr r5
  20. BEGIN_MMU_FTR_SECTION
  21. li r10,0
  22. mtspr SPRN_SPRG_603_LRU,r10 /* init SW LRU tracking */
  23. END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
  24. BEGIN_FTR_SECTION
  25. bl __init_fpu_registers
  26. END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
  27. bl setup_common_caches
  28. mtlr r5
  29. blr
  30. _GLOBAL(__setup_cpu_604)
  31. mflr r5
  32. bl setup_common_caches
  33. bl setup_604_hid0
  34. mtlr r5
  35. blr
  36. _GLOBAL(__setup_cpu_750)
  37. mflr r5
  38. bl __init_fpu_registers
  39. bl setup_common_caches
  40. bl setup_750_7400_hid0
  41. mtlr r5
  42. blr
  43. _GLOBAL(__setup_cpu_750cx)
  44. mflr r5
  45. bl __init_fpu_registers
  46. bl setup_common_caches
  47. bl setup_750_7400_hid0
  48. bl setup_750cx
  49. mtlr r5
  50. blr
  51. _GLOBAL(__setup_cpu_750fx)
  52. mflr r5
  53. bl __init_fpu_registers
  54. bl setup_common_caches
  55. bl setup_750_7400_hid0
  56. bl setup_750fx
  57. mtlr r5
  58. blr
  59. _GLOBAL(__setup_cpu_7400)
  60. mflr r5
  61. bl __init_fpu_registers
  62. bl setup_7400_workarounds
  63. bl setup_common_caches
  64. bl setup_750_7400_hid0
  65. mtlr r5
  66. blr
  67. _GLOBAL(__setup_cpu_7410)
  68. mflr r5
  69. bl __init_fpu_registers
  70. bl setup_7410_workarounds
  71. bl setup_common_caches
  72. bl setup_750_7400_hid0
  73. li r3,0
  74. mtspr SPRN_L2CR2,r3
  75. mtlr r5
  76. blr
  77. _GLOBAL(__setup_cpu_745x)
  78. mflr r5
  79. bl setup_common_caches
  80. bl setup_745x_specifics
  81. mtlr r5
  82. blr
  83. /* Enable caches for 603's, 604, 750 & 7400 */
  84. setup_common_caches:
  85. mfspr r11,SPRN_HID0
  86. andi. r0,r11,HID0_DCE
  87. ori r11,r11,HID0_ICE|HID0_DCE
  88. ori r8,r11,HID0_ICFI
  89. bne 1f /* don't invalidate the D-cache */
  90. ori r8,r8,HID0_DCI /* unless it wasn't enabled */
  91. 1: sync
  92. mtspr SPRN_HID0,r8 /* enable and invalidate caches */
  93. sync
  94. mtspr SPRN_HID0,r11 /* enable caches */
  95. sync
  96. isync
  97. blr
  98. /* 604, 604e, 604ev, ...
  99. * Enable superscalar execution & branch history table
  100. */
  101. setup_604_hid0:
  102. mfspr r11,SPRN_HID0
  103. ori r11,r11,HID0_SIED|HID0_BHTE
  104. ori r8,r11,HID0_BTCD
  105. sync
  106. mtspr SPRN_HID0,r8 /* flush branch target address cache */
  107. sync /* on 604e/604r */
  108. mtspr SPRN_HID0,r11
  109. sync
  110. isync
  111. blr
  112. /* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
  113. * erratas we work around here.
  114. * Moto MPC710CE.pdf describes them, those are errata
  115. * #3, #4 and #5
  116. * Note that we assume the firmware didn't choose to
  117. * apply other workarounds (there are other ones documented
  118. * in the .pdf). It appear that Apple firmware only works
  119. * around #3 and with the same fix we use. We may want to
  120. * check if the CPU is using 60x bus mode in which case
  121. * the workaround for errata #4 is useless. Also, we may
  122. * want to explicitly clear HID0_NOPDST as this is not
  123. * needed once we have applied workaround #5 (though it's
  124. * not set by Apple's firmware at least).
  125. */
  126. setup_7400_workarounds:
  127. mfpvr r3
  128. rlwinm r3,r3,0,20,31
  129. cmpwi 0,r3,0x0207
  130. ble 1f
  131. blr
  132. setup_7410_workarounds:
  133. mfpvr r3
  134. rlwinm r3,r3,0,20,31
  135. cmpwi 0,r3,0x0100
  136. bnelr
  137. 1:
  138. mfspr r11,SPRN_MSSSR0
  139. /* Errata #3: Set L1OPQ_SIZE to 0x10 */
  140. rlwinm r11,r11,0,9,6
  141. oris r11,r11,0x0100
  142. /* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
  143. oris r11,r11,0x0002
  144. /* Errata #5: Set DRLT_SIZE to 0x01 */
  145. rlwinm r11,r11,0,5,2
  146. oris r11,r11,0x0800
  147. sync
  148. mtspr SPRN_MSSSR0,r11
  149. sync
  150. isync
  151. blr
  152. /* 740/750/7400/7410
  153. * Enable Store Gathering (SGE), Address Brodcast (ABE),
  154. * Branch History Table (BHTE), Branch Target ICache (BTIC)
  155. * Dynamic Power Management (DPM), Speculative (SPD)
  156. * Clear Instruction cache throttling (ICTC)
  157. */
  158. setup_750_7400_hid0:
  159. mfspr r11,SPRN_HID0
  160. ori r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
  161. oris r11,r11,HID0_DPM@h
  162. BEGIN_FTR_SECTION
  163. xori r11,r11,HID0_BTIC
  164. END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
  165. BEGIN_FTR_SECTION
  166. xoris r11,r11,HID0_DPM@h /* disable dynamic power mgmt */
  167. END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
  168. li r3,HID0_SPD
  169. andc r11,r11,r3 /* clear SPD: enable speculative */
  170. li r3,0
  171. mtspr SPRN_ICTC,r3 /* Instruction Cache Throttling off */
  172. isync
  173. mtspr SPRN_HID0,r11
  174. sync
  175. isync
  176. blr
  177. /* 750cx specific
  178. * Looks like we have to disable NAP feature for some PLL settings...
  179. * (waiting for confirmation)
  180. */
  181. setup_750cx:
  182. mfspr r10, SPRN_HID1
  183. rlwinm r10,r10,4,28,31
  184. cmpwi cr0,r10,7
  185. cmpwi cr1,r10,9
  186. cmpwi cr2,r10,11
  187. cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
  188. cror 4*cr0+eq,4*cr0+eq,4*cr2+eq
  189. bnelr
  190. lwz r6,CPU_SPEC_FEATURES(r4)
  191. li r7,CPU_FTR_CAN_NAP
  192. andc r6,r6,r7
  193. stw r6,CPU_SPEC_FEATURES(r4)
  194. blr
  195. /* 750fx specific
  196. */
  197. setup_750fx:
  198. blr
  199. /* MPC 745x
  200. * Enable Store Gathering (SGE), Branch Folding (FOLD)
  201. * Branch History Table (BHTE), Branch Target ICache (BTIC)
  202. * Dynamic Power Management (DPM), Speculative (SPD)
  203. * Ensure our data cache instructions really operate.
  204. * Timebase has to be running or we wouldn't have made it here,
  205. * just ensure we don't disable it.
  206. * Clear Instruction cache throttling (ICTC)
  207. * Enable L2 HW prefetch
  208. */
  209. setup_745x_specifics:
  210. /* We check for the presence of an L3 cache setup by
  211. * the firmware. If any, we disable NAP capability as
  212. * it's known to be bogus on rev 2.1 and earlier
  213. */
  214. BEGIN_FTR_SECTION
  215. mfspr r11,SPRN_L3CR
  216. andis. r11,r11,L3CR_L3E@h
  217. beq 1f
  218. END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
  219. lwz r6,CPU_SPEC_FEATURES(r4)
  220. andi. r0,r6,CPU_FTR_L3_DISABLE_NAP
  221. beq 1f
  222. li r7,CPU_FTR_CAN_NAP
  223. andc r6,r6,r7
  224. stw r6,CPU_SPEC_FEATURES(r4)
  225. 1:
  226. mfspr r11,SPRN_HID0
  227. /* All of the bits we have to set.....
  228. */
  229. ori r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
  230. ori r11,r11,HID0_LRSTK | HID0_BTIC
  231. oris r11,r11,HID0_DPM@h
  232. BEGIN_MMU_FTR_SECTION
  233. oris r11,r11,HID0_HIGH_BAT@h
  234. END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
  235. BEGIN_FTR_SECTION
  236. xori r11,r11,HID0_BTIC
  237. END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
  238. BEGIN_FTR_SECTION
  239. xoris r11,r11,HID0_DPM@h /* disable dynamic power mgmt */
  240. END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
  241. /* All of the bits we have to clear....
  242. */
  243. li r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
  244. andc r11,r11,r3 /* clear SPD: enable speculative */
  245. li r3,0
  246. mtspr SPRN_ICTC,r3 /* Instruction Cache Throttling off */
  247. isync
  248. mtspr SPRN_HID0,r11
  249. sync
  250. isync
  251. /* Enable L2 HW prefetch, if L2 is enabled
  252. */
  253. mfspr r3,SPRN_L2CR
  254. andis. r3,r3,L2CR_L2E@h
  255. beqlr
  256. mfspr r3,SPRN_MSSCR0
  257. ori r3,r3,3
  258. sync
  259. mtspr SPRN_MSSCR0,r3
  260. sync
  261. isync
  262. blr
  263. /*
  264. * Initialize the FPU registers. This is needed to work around an errata
  265. * in some 750 cpus where using a not yet initialized FPU register after
  266. * power on reset may hang the CPU
  267. */
  268. _GLOBAL(__init_fpu_registers)
  269. mfmsr r10
  270. ori r11,r10,MSR_FP
  271. mtmsr r11
  272. isync
  273. addis r9,r3,empty_zero_page@ha
  274. addi r9,r9,empty_zero_page@l
  275. REST_32FPRS(0,r9)
  276. sync
  277. mtmsr r10
  278. isync
  279. blr
  280. /* Definitions for the table use to save CPU states */
  281. #define CS_HID0 0
  282. #define CS_HID1 4
  283. #define CS_HID2 8
  284. #define CS_MSSCR0 12
  285. #define CS_MSSSR0 16
  286. #define CS_ICTRL 20
  287. #define CS_LDSTCR 24
  288. #define CS_LDSTDB 28
  289. #define CS_SIZE 32
  290. .data
  291. .balign L1_CACHE_BYTES
  292. cpu_state_storage:
  293. .space CS_SIZE
  294. .balign L1_CACHE_BYTES,0
  295. .text
  296. /* Called in normal context to backup CPU 0 state. This
  297. * does not include cache settings. This function is also
  298. * called for machine sleep. This does not include the MMU
  299. * setup, BATs, etc... but rather the "special" registers
  300. * like HID0, HID1, MSSCR0, etc...
  301. */
  302. _GLOBAL(__save_cpu_setup)
  303. /* Some CR fields are volatile, we back it up all */
  304. mfcr r7
  305. /* Get storage ptr */
  306. lis r5,cpu_state_storage@h
  307. ori r5,r5,cpu_state_storage@l
  308. /* Save HID0 (common to all CONFIG_6xx cpus) */
  309. mfspr r3,SPRN_HID0
  310. stw r3,CS_HID0(r5)
  311. /* Now deal with CPU type dependent registers */
  312. mfspr r3,SPRN_PVR
  313. srwi r3,r3,16
  314. cmplwi cr0,r3,0x8000 /* 7450 */
  315. cmplwi cr1,r3,0x000c /* 7400 */
  316. cmplwi cr2,r3,0x800c /* 7410 */
  317. cmplwi cr3,r3,0x8001 /* 7455 */
  318. cmplwi cr4,r3,0x8002 /* 7457 */
  319. cmplwi cr5,r3,0x8003 /* 7447A */
  320. cmplwi cr6,r3,0x7000 /* 750FX */
  321. cmplwi cr7,r3,0x8004 /* 7448 */
  322. /* cr1 is 7400 || 7410 */
  323. cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
  324. /* cr0 is 74xx */
  325. cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
  326. cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
  327. cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
  328. cror 4*cr0+eq,4*cr0+eq,4*cr5+eq
  329. cror 4*cr0+eq,4*cr0+eq,4*cr7+eq
  330. bne 1f
  331. /* Backup 74xx specific regs */
  332. mfspr r4,SPRN_MSSCR0
  333. stw r4,CS_MSSCR0(r5)
  334. mfspr r4,SPRN_MSSSR0
  335. stw r4,CS_MSSSR0(r5)
  336. beq cr1,1f
  337. /* Backup 745x specific registers */
  338. mfspr r4,SPRN_HID1
  339. stw r4,CS_HID1(r5)
  340. mfspr r4,SPRN_ICTRL
  341. stw r4,CS_ICTRL(r5)
  342. mfspr r4,SPRN_LDSTCR
  343. stw r4,CS_LDSTCR(r5)
  344. mfspr r4,SPRN_LDSTDB
  345. stw r4,CS_LDSTDB(r5)
  346. 1:
  347. bne cr6,1f
  348. /* Backup 750FX specific registers */
  349. mfspr r4,SPRN_HID1
  350. stw r4,CS_HID1(r5)
  351. /* If rev 2.x, backup HID2 */
  352. mfspr r3,SPRN_PVR
  353. andi. r3,r3,0xff00
  354. cmpwi cr0,r3,0x0200
  355. bne 1f
  356. mfspr r4,SPRN_HID2
  357. stw r4,CS_HID2(r5)
  358. 1:
  359. mtcr r7
  360. blr
  361. /* Called with no MMU context (typically MSR:IR/DR off) to
  362. * restore CPU state as backed up by the previous
  363. * function. This does not include cache setting
  364. */
  365. _GLOBAL(__restore_cpu_setup)
  366. /* Some CR fields are volatile, we back it up all */
  367. mfcr r7
  368. /* Get storage ptr */
  369. lis r5,(cpu_state_storage-KERNELBASE)@h
  370. ori r5,r5,cpu_state_storage@l
  371. /* Restore HID0 */
  372. lwz r3,CS_HID0(r5)
  373. sync
  374. isync
  375. mtspr SPRN_HID0,r3
  376. sync
  377. isync
  378. /* Now deal with CPU type dependent registers */
  379. mfspr r3,SPRN_PVR
  380. srwi r3,r3,16
  381. cmplwi cr0,r3,0x8000 /* 7450 */
  382. cmplwi cr1,r3,0x000c /* 7400 */
  383. cmplwi cr2,r3,0x800c /* 7410 */
  384. cmplwi cr3,r3,0x8001 /* 7455 */
  385. cmplwi cr4,r3,0x8002 /* 7457 */
  386. cmplwi cr5,r3,0x8003 /* 7447A */
  387. cmplwi cr6,r3,0x7000 /* 750FX */
  388. cmplwi cr7,r3,0x8004 /* 7448 */
  389. /* cr1 is 7400 || 7410 */
  390. cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
  391. /* cr0 is 74xx */
  392. cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
  393. cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
  394. cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
  395. cror 4*cr0+eq,4*cr0+eq,4*cr5+eq
  396. cror 4*cr0+eq,4*cr0+eq,4*cr7+eq
  397. bne 2f
  398. /* Restore 74xx specific regs */
  399. lwz r4,CS_MSSCR0(r5)
  400. sync
  401. mtspr SPRN_MSSCR0,r4
  402. sync
  403. isync
  404. lwz r4,CS_MSSSR0(r5)
  405. sync
  406. mtspr SPRN_MSSSR0,r4
  407. sync
  408. isync
  409. bne cr2,1f
  410. /* Clear 7410 L2CR2 */
  411. li r4,0
  412. mtspr SPRN_L2CR2,r4
  413. 1: beq cr1,2f
  414. /* Restore 745x specific registers */
  415. lwz r4,CS_HID1(r5)
  416. sync
  417. mtspr SPRN_HID1,r4
  418. isync
  419. sync
  420. lwz r4,CS_ICTRL(r5)
  421. sync
  422. mtspr SPRN_ICTRL,r4
  423. isync
  424. sync
  425. lwz r4,CS_LDSTCR(r5)
  426. sync
  427. mtspr SPRN_LDSTCR,r4
  428. isync
  429. sync
  430. lwz r4,CS_LDSTDB(r5)
  431. sync
  432. mtspr SPRN_LDSTDB,r4
  433. isync
  434. sync
  435. 2: bne cr6,1f
  436. /* Restore 750FX specific registers
  437. * that is restore HID2 on rev 2.x and PLL config & switch
  438. * to PLL 0 on all
  439. */
  440. /* If rev 2.x, restore HID2 with low voltage bit cleared */
  441. mfspr r3,SPRN_PVR
  442. andi. r3,r3,0xff00
  443. cmpwi cr0,r3,0x0200
  444. bne 4f
  445. lwz r4,CS_HID2(r5)
  446. rlwinm r4,r4,0,19,17
  447. mtspr SPRN_HID2,r4
  448. sync
  449. 4:
  450. lwz r4,CS_HID1(r5)
  451. rlwinm r5,r4,0,16,14
  452. mtspr SPRN_HID1,r5
  453. /* Wait for PLL to stabilize */
  454. mftbl r5
  455. 3: mftbl r6
  456. sub r6,r6,r5
  457. cmplwi cr0,r6,10000
  458. ble 3b
  459. /* Setup final PLL */
  460. mtspr SPRN_HID1,r4
  461. 1:
  462. mtcr r7
  463. blr