/* cpu_setup_6xx.S — low level CPU setup for 6xx-family PowerPC cores */
  1. /* SPDX-License-Identifier: GPL-2.0-or-later */
  2. /*
  3. * This file contains low level CPU setup functions.
  4. * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
  5. */
  6. #include <linux/linkage.h>
  7. #include <asm/processor.h>
  8. #include <asm/page.h>
  9. #include <asm/cputable.h>
  10. #include <asm/ppc_asm.h>
  11. #include <asm/asm-offsets.h>
  12. #include <asm/cache.h>
  13. #include <asm/mmu.h>
  14. #include <asm/feature-fixups.h>
/*
 * 603 family setup: optionally init the SW TLB LRU tracking and the
 * FPRs, then enable the caches; cores flagged with MMU_FTR_USE_HIGH_BATS
 * also get the G2_LE high-BAT enable.  LR is preserved in r5 across the
 * bl calls (no stack frame is used here).
 */
_GLOBAL(__setup_cpu_603)
	mflr	r5			/* save LR; clobbered by the bl calls below */
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10	/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
BEGIN_FTR_SECTION
	bl	__init_fpu_registers	/* skipped when the FPU is unavailable */
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
/*
 * This assumes that all cores using __setup_cpu_603 with
 * MMU_FTR_USE_HIGH_BATS are G2_LE compatible
 */
BEGIN_MMU_FTR_SECTION
	bl	setup_g2_le_hid2
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
	mtlr	r5			/* restore LR saved above */
	blr
/* 604 family setup: enable caches and program HID0. */
_GLOBAL(__setup_cpu_604)
	mflr	r5			/* preserve LR across the bl calls */
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
/* 750 setup: init FPRs (errata workaround), enable caches, program HID0. */
_GLOBAL(__setup_cpu_750)
	mflr	r5			/* preserve LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
/* 750CX setup: same as 750, plus the 750cx NAP/PLL-config quirk. */
_GLOBAL(__setup_cpu_750cx)
	mflr	r5			/* preserve LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx		/* may clear CPU_FTR_CAN_NAP */
	mtlr	r5
	blr
/* 750FX setup: same as 750, plus the (currently empty) 750fx hook. */
_GLOBAL(__setup_cpu_750fx)
	mflr	r5			/* preserve LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
/* 7400 setup: FPR init, MSSSR0 errata workarounds, caches, HID0. */
_GLOBAL(__setup_cpu_7400)
	mflr	r5			/* preserve LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_7400_workarounds	/* errata fixups for rev <= 2.7 */
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
/* 7410 setup: like 7400 but with the 7410 errata check and L2CR2 cleared. */
_GLOBAL(__setup_cpu_7410)
	mflr	r5			/* preserve LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_7410_workarounds	/* errata fixups for rev 1.0 */
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3		/* clear the 7410 L2CR2 register */
	mtlr	r5
	blr
/* 745x setup: enable caches then apply the 745x-specific HID0/L2 setup. */
_GLOBAL(__setup_cpu_745x)
	mflr	r5			/* preserve LR across the bl calls */
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr
/* Enable caches for 603's, 604, 750 & 7400
 * Enables the I- and D-caches in HID0, invalidating them on the way in,
 * except that an already-enabled D-cache is NOT invalidated (it may hold
 * live data).  Clobbers r0, r8, r11 and cr0.
 */
SYM_FUNC_START_LOCAL(setup_common_caches)
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE		/* cr0 = was the D-cache enabled? */
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI	/* r8 = enables + I-cache flash invalidate */
	bne	1f /* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI /* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8 /* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11 /* enable caches */
	sync
	isync
	blr
SYM_FUNC_END(setup_common_caches)
/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 * Also flushes the branch target address cache by writing HID0 with
 * HID0_BTCD set first, then rewriting it without.  Clobbers r8, r11.
 */
SYM_FUNC_START_LOCAL(setup_604_hid0)
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD	/* transient value with BTAC disable */
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11	/* final value: SIED + BHTE */
	sync
	isync
	blr
SYM_FUNC_END(setup_604_hid0)
/* Enable high BATs for G2_LE and derivatives like e300cX
 * Sets HID2[HBE]; clobbers r11.
 */
SYM_FUNC_START_LOCAL(setup_g2_le_hid2)
	mfspr	r11,SPRN_HID2_G2_LE
	oris	r11,r11,HID2_G2_LE_HBE@h	/* set the high-BAT enable bit */
	mtspr	SPRN_HID2_G2_LE,r11
	sync
	isync
	blr
SYM_FUNC_END(setup_g2_le_hid2)
/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * erratas we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appear that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 *
 * NOTE: setup_7400_workarounds deliberately branches (ble 1f) into the
 * shared "1:" fixup sequence inside setup_7410_workarounds below; the
 * two routines must stay adjacent and in this order.
 * Clobbers r3 and r11.
 */
SYM_FUNC_START_LOCAL(setup_7400_workarounds)
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* keep the low 12 bits: revision */
	cmpwi	0,r3,0x0207
	ble	1f			/* rev <= 2.7: apply the fixups below */
	blr
SYM_FUNC_END(setup_7400_workarounds)
SYM_FUNC_START_LOCAL(setup_7410_workarounds)
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* keep the low 12 bits: revision */
	cmpwi	0,r3,0x0100
	bnelr				/* only rev 1.0 needs the fixups */
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6		/* clear the field (mask wraps, drops bits 7-8) */
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2		/* clear the field (mask wraps, drops bits 3-4) */
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
SYM_FUNC_END(setup_7410_workarounds)
/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 * BTIC and DPM are undone again (xori/xoris) on CPUs carrying the
 * CPU_FTR_NO_BTIC / CPU_FTR_NO_DPM feature flags.  Clobbers r3, r11.
 */
SYM_FUNC_START_LOCAL(setup_750_7400_hid0)
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo the BTIC enable set above */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
SYM_FUNC_END(setup_750_7400_hid0)
/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 * If the HID1 PLL configuration is 7, 9 or 11, clears CPU_FTR_CAN_NAP
 * in the cpu_spec feature word.  Clobbers r6, r7, r10 and cr0-cr2.
 */
SYM_FUNC_START_LOCAL(setup_750cx)
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31		/* r10 = top nibble of HID1 (PLL cfg) */
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq	/* cr0.eq = (cfg==7 || 9 || 11) */
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr				/* other PLL settings: nothing to do */
	/* NOTE(review): r4 is assumed to point at the cpu_spec entry
	 * (CPU_SPEC_FEATURES offset) — confirm against callers */
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear the CAN_NAP feature bit */
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr
SYM_FUNC_END(setup_750cx)
/* 750fx specific
 * Currently a no-op placeholder; kept so __setup_cpu_750fx has a hook.
 */
SYM_FUNC_START_LOCAL(setup_750fx)
	blr
SYM_FUNC_END(setup_750fx)
/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 * Clobbers r0, r3, r6, r7, r11 and cr0.
 */
SYM_FUNC_START_LOCAL(setup_745x_specifics)
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If any, we disable NAP capability as
	 * it's known to be bogus on rev 2.1 and earlier
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h	/* L3 enabled by firmware? */
	beq	1f			/* no L3: keep NAP capability */
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	/* NOTE(review): r4 is assumed to point at the cpu_spec entry
	 * (CPU_SPEC_FEATURES offset) — confirm against callers */
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andis.	r0,r6,CPU_FTR_L3_DISABLE_NAP@h
	beq	1f			/* this CPU doesn't need NAP disabled */
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear the CAN_NAP feature bit */
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0
	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo the BTIC enable set above */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr				/* L2 disabled: done */
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3			/* set the two low prefetch-enable bits */
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr
SYM_FUNC_END(setup_745x_specifics)
/*
 * Initialize the FPU registers. This is needed to work around an errata
 * in some 750 cpus where using a not yet initialized FPU register after
 * power on reset may hang the CPU
 * Temporarily enables MSR:FP, loads all 32 FPRs from empty_zero_page,
 * then restores the original MSR.  Clobbers r9, r10, r11 and f0-f31.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10			/* save current MSR */
	ori	r11,r10,MSR_FP
	mtmsr	r11			/* enable FP so the FPR loads are legal */
	isync
	/* NOTE(review): r3 appears to carry an address offset added to
	 * empty_zero_page — confirm the caller convention */
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)		/* load f0-f31 from the zero page */
	sync
	mtmsr	r10			/* restore original MSR (FP state) */
	isync
	blr
_ASM_NOKPROBE_SYMBOL(__init_fpu_registers)
/* Definitions for the table use to save CPU states */
/* Byte offsets into cpu_state_storage used by __save_cpu_setup and
 * __restore_cpu_setup below; keep CS_SIZE in sync with the offsets. */
#define CS_HID0 0
#define CS_HID1 4
#define CS_HID2 8
#define CS_MSSCR0 12
#define CS_MSSSR0 16
#define CS_ICTRL 20
#define CS_LDSTCR 24
#define CS_LDSTDB 28
#define CS_SIZE 32
	.data
	.balign	L1_CACHE_BYTES
/* Backing store for the CPU 0 "special" register state (see below) */
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text
/* Called in normal context to backup CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 * Clobbers r3, r4, r5 and all CR fields (CR is saved/restored in r7).
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, we back it up all */
	mfcr	r7
	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l
	/* Save HID0 (common to all CONFIG_PPC_BOOK3S_32 cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)
	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* r3 = PVR version (high halfword) */
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f			/* not a 74xx: skip the MSS regs */
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f			/* 7400/7410 have no 745x-only regs */
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f			/* not a 750FX: done */
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* isolate the major revision byte */
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2_750FX
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7			/* restore the caller's CR */
	blr
/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache setting
 * Clobbers r3, r4, r5, r6 and all CR fields (CR saved/restored in r7).
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, we back it up all */
	mfcr	r7
	/* Get storage ptr (physical address, since translation may be off;
	 * NOTE(review): the @l half is used unadjusted — assumes the
	 * -KERNELBASE offset doesn't change the low 16 bits) */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l
	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync
	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* r3 = PVR version (high halfword) */
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f			/* not a 74xx: skip the MSS regs */
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f			/* not a 7410: skip L2CR2 */
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f			/* 7400/7410 have no 745x-only regs */
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f			/* not a 750FX: done */
	/* Restore 750FX specific registers
	 * that is restore HID2 on rev 2.x and PLL config & switch
	 * to PLL 0 on all
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* isolate the major revision byte */
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17		/* clear bit 18 (low voltage) */
	mtspr	SPRN_HID2_750FX,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14		/* clear bit 15: select PLL 0 */
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5		/* busy-wait ~10000 timebase ticks */
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4		/* restore the saved PLL selection */
1:
	mtcr	r7			/* restore the caller's CR */
	blr
_ASM_NOKPROBE_SYMBOL(__restore_cpu_setup)