/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Copyright 2004, 2007, 2011 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 */

/* U-Boot - Startup Code for 86xx PowerPC based Embedded Boards
 *
 * The processor starts at 0xfff00100 and the code is executed
 * from flash. The code is organized to run at a different address
 * in memory, which is fine as long as we don't jump around before
 * relocating. board_init lies at a fairly high address, and once
 * the CPU has jumped there, everything is OK.
 */
#include <asm-offsets.h>
#include <config.h>
#include <mpc86xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/u-boot.h>

/*
 * Need MSR_DR | MSR_IR enabled to access I/O (printf) in exceptions
 */

/*
 * Set up GOT: Global Offset Table
 *
 * Use r12 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(__bss_end)
	GOT_ENTRY(__bss_start)
	END_GOT
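/*
 * The GOT entries above let the position-independent startup code find
 * these symbols before relocation: GET_GOT loads r12 with the runtime
 * address of this table, and GOT(name) then addresses the matching slot
 * relative to r12. The relocation code in relocate_code/in_ram later
 * adjusts the slot contents by the relocation offset.
 */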
/*
 * r3 - 1st arg to board_init(): IMMP pointer
 * r4 - 2nd arg to board_init(): boot flag
 */
	.text
	.long 0x27051956 /* U-Boot Magic Number */
	.globl version_string
version_string:
	.ascii U_BOOT_VERSION_STRING, "\0"

	. = EXC_OFF_SYS_RESET
	.globl _start
_start:
	b boot_cold

/* the boot code is located below the exception table */

	.globl _start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG(SRR0, SRR1)
	mfspr r4,DAR
	stw r4,_DAR(r21)
	mfspr r5,DSISR
	stw r5,_DSISR(r21)
	addi r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE)

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG(SRR0, SRR1)
	addi r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException,
		MSR_KERNEL, COPY_EE)

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)
	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)
	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)
	STD_EXCEPTION(0x1000, SoftEmu, SoftEmuException)
	STD_EXCEPTION(0x1100, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataTLBMiss, UnknownException)
	STD_EXCEPTION(0x1300, InstructionTLBError, UnknownException)
	STD_EXCEPTION(0x1400, DataTLBError, UnknownException)
	STD_EXCEPTION(0x1500, Reserved5, UnknownException)
	STD_EXCEPTION(0x1600, Reserved6, UnknownException)
	STD_EXCEPTION(0x1700, Reserved7, UnknownException)
	STD_EXCEPTION(0x1800, Reserved8, UnknownException)
	STD_EXCEPTION(0x1900, Reserved9, UnknownException)
	STD_EXCEPTION(0x1a00, ReservedA, UnknownException)
	STD_EXCEPTION(0x1b00, ReservedB, UnknownException)
	STD_EXCEPTION(0x1c00, DataBreakpoint, UnknownException)
	STD_EXCEPTION(0x1d00, InstructionBreakpoint, UnknownException)
	STD_EXCEPTION(0x1e00, PeripheralBreakpoint, UnknownException)
	STD_EXCEPTION(0x1f00, DevPortBreakpoint, UnknownException)

	.globl _end_of_vectors
_end_of_vectors:

	. = 0x2000
boot_cold:
	/*
	 * NOTE: Only Cpu 0 will ever come here. Other cores go to an
	 * address specified by the BPTR
	 */
1:
#ifdef CONFIG_SYS_RAMBOOT
	/* disable everything */
	li r0, 0
	mtspr HID0, r0
	sync
	mtmsr 0
#endif

	/* Invalidate BATs */
	bl invalidate_bats
	sync

	/* Invalidate all of TLB before MMU turn on */
	bl clear_tlbs
	sync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	lis r3, L2_INIT@h
	ori r3, r3, L2_INIT@l
	mtspr l2cr, r3
	/* invalidate the L2 cache */
	bl l2cache_invalidate
	sync
#endif

	/*
	 * Calculate absolute address in FLASH and jump there
	 *------------------------------------------------------*/
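	/*
	 * _start sits at offset EXC_OFF_SYS_RESET (0x100) from the start
	 * of the image, so "in_flash - _start + EXC_OFF_SYS_RESET" is
	 * in_flash's offset within the image; adding it to
	 * CONFIG_SYS_MONITOR_BASE_EARLY gives the absolute flash address
	 * of in_flash that the blr below jumps to.
	 */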
	lis r3, CONFIG_SYS_MONITOR_BASE_EARLY@h
	ori r3, r3, CONFIG_SYS_MONITOR_BASE_EARLY@l
	addi r3, r3, in_flash - _start + EXC_OFF_SYS_RESET
	mtlr r3
	blr

in_flash:
	/* let the C-code set up the rest */
	/*                                */
	/* Be careful to keep code relocatable ! */
	/*------------------------------------------------------*/
	/* perform low-level init */

	/* enable extended addressing */
	bl enable_ext_addr

	/* setup the bats */
	bl early_bats

	/*
	 * Cache must be enabled here for stack-in-cache trick.
	 * This means we need to enable the BATS.
	 * Cache should be turned on after BATs, since by default
	 * everything is write-through.
	 */
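	/*
	 * Note: translation is switched on via an rfi rather than a
	 * plain mtmsr: SRR0 is loaded with the address of
	 * addr_trans_enabled and SRR1 with the new MSR (IR|DR set), so
	 * the rfi below updates the MSR and jumps to the next
	 * instruction in one context-synchronizing step.
	 */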
	/* enable address translation */
	mfmsr r5
	ori r5, r5, (MSR_IR | MSR_DR)
	lis r3,addr_trans_enabled@h
	ori r3, r3, addr_trans_enabled@l
	mtspr SPRN_SRR0,r3
	mtspr SPRN_SRR1,r5
	rfi

addr_trans_enabled:
	/* enable and invalidate the data cache */
/*	bl l1dcache_enable */
	bl dcache_enable
	sync

#if 1
	bl icache_enable
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
	bl lock_ram_in_cache
	sync
#endif

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	bl setup_ccsrbar
#endif

	/* set up the stack pointer in our newly created
	 * cache-ram (r1) */
	lis r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@h
	ori r1, r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@l

	li r0, 0 /* Make room for stack frame header and */
	stwu r0, -4(r1) /* clear final stack frame so that */
	stwu r0, -4(r1) /* stack backtraces terminate cleanly */

	GET_GOT /* initialize GOT access */

	/* run low-level CPU init code (from Flash) */
	bl cpu_init_f
	sync

#ifdef RUN_DIAG
	/* Load PX_AUX register address in r4 */
	lis r4, PIXIS_BASE@h
	ori r4, r4, 0x6
	/* Load contents of PX_AUX in r3 bits 24 to 31 */
	lbz r3, 0(r4)
	/* Mask and obtain the bit in r3 */
	rlwinm. r3, r3, 0, 24, 24
	/* If not zero, jump and continue with u-boot */
	bne diag_done

	/* Load back contents of PX_AUX in r3 bits 24 to 31 */
	lbz r3, 0(r4)
	/* Set the MSB of the register value */
	ori r3, r3, 0x80
	/* Write value in r3 back to PX_AUX */
	stb r3, 0(r4)

	/* Get the address to jump to in r3 */
	lis r3, CONFIG_SYS_DIAG_ADDR@h
	ori r3, r3, CONFIG_SYS_DIAG_ADDR@l
	/* Load the LR with the branch address */
	mtlr r3
	/* Branch to diagnostic */
	blr
diag_done:
#endif

/*	bl l2cache_enable */

	/* run 1st part of board init code (from Flash) */
	li r3, 0 /* clear boot_flag for calling board_init_f */
	bl board_init_f
	sync

	/* NOTREACHED - board_init_f() does not return */

	.globl invalidate_bats
invalidate_bats:
	li r0, 0
	/* invalidate BATs */
	mtspr IBAT0U, r0
	mtspr IBAT1U, r0
	mtspr IBAT2U, r0
	mtspr IBAT3U, r0
	mtspr IBAT4U, r0
	mtspr IBAT5U, r0
	mtspr IBAT6U, r0
	mtspr IBAT7U, r0
	isync
	mtspr DBAT0U, r0
	mtspr DBAT1U, r0
	mtspr DBAT2U, r0
	mtspr DBAT3U, r0
	mtspr DBAT4U, r0
	mtspr DBAT5U, r0
	mtspr DBAT6U, r0
	mtspr DBAT7U, r0
	isync
	sync
	blr

#define CONFIG_BAT_PAIR(n) \
	lis r4, CONFIG_SYS_IBAT##n##L@h; \
	ori r4, r4, CONFIG_SYS_IBAT##n##L@l; \
	lis r3, CONFIG_SYS_IBAT##n##U@h; \
	ori r3, r3, CONFIG_SYS_IBAT##n##U@l; \
	mtspr IBAT##n##L, r4; \
	mtspr IBAT##n##U, r3; \
	lis r4, CONFIG_SYS_DBAT##n##L@h; \
	ori r4, r4, CONFIG_SYS_DBAT##n##L@l; \
	lis r3, CONFIG_SYS_DBAT##n##U@h; \
	ori r3, r3, CONFIG_SYS_DBAT##n##U@l; \
	mtspr DBAT##n##L, r4; \
	mtspr DBAT##n##U, r3;
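/*
 * Note: each BAT pair is programmed lower half first and upper half
 * last; the valid bits (Vs/Vp) live in the upper BAT register, so a
 * mapping never becomes valid while it is only half written.
 */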
/*
 * setup_bats:
 *
 * Set up the final BAT registers now that setup is done.
 *
 * Assumes that:
 *	1) Address translation is enabled upon entry
 *	2) The boot rom is still accessible via 1:1 translation
 */
	.globl setup_bats
setup_bats:
	mflr r5
	sync

	/*
	 * When we disable address translation, we will get 1:1 (VA==PA)
	 * translation. The only place we know for sure is safe for that is
	 * the bootrom where we originally started out. Pop back into there.
	 */
	lis r4, CONFIG_SYS_MONITOR_BASE_EARLY@h
	ori r4, r4, CONFIG_SYS_MONITOR_BASE_EARLY@l
	addi r4, r4, trans_disabled - _start + EXC_OFF_SYS_RESET

	/* disable address translation */
	mfmsr r3
	rlwinm r3, r3, 0, 28, 25
	mtspr SRR0, r4
	mtspr SRR1, r3
	rfi

trans_disabled:
#if defined(CONFIG_SYS_DBAT0U) && defined(CONFIG_SYS_DBAT0L) \
	&& defined(CONFIG_SYS_IBAT0U) && defined(CONFIG_SYS_IBAT0L)
	CONFIG_BAT_PAIR(0)
#endif
	CONFIG_BAT_PAIR(1)
	CONFIG_BAT_PAIR(2)
	CONFIG_BAT_PAIR(3)
	CONFIG_BAT_PAIR(4)
	CONFIG_BAT_PAIR(5)
	CONFIG_BAT_PAIR(6)
	CONFIG_BAT_PAIR(7)

	sync
	isync

	/* Turn translation back on and return */
	mfmsr r3
	ori r3, r3, (MSR_IR | MSR_DR)
	mtspr SPRN_SRR0,r5
	mtspr SPRN_SRR1,r3
	rfi

/*
 * early_bats:
 *
 * Set up bats needed early on - this is usually the BAT for the
 * stack-in-cache, the Flash, and CCSR space
 */
	.globl early_bats
early_bats:
	/* IBAT 3 */
	lis r4, CONFIG_SYS_IBAT3L@h
	ori r4, r4, CONFIG_SYS_IBAT3L@l
	lis r3, CONFIG_SYS_IBAT3U@h
	ori r3, r3, CONFIG_SYS_IBAT3U@l
	mtspr IBAT3L, r4
	mtspr IBAT3U, r3
	isync

	/* DBAT 3 */
	lis r4, CONFIG_SYS_DBAT3L@h
	ori r4, r4, CONFIG_SYS_DBAT3L@l
	lis r3, CONFIG_SYS_DBAT3U@h
	ori r3, r3, CONFIG_SYS_DBAT3U@l
	mtspr DBAT3L, r4
	mtspr DBAT3U, r3
	isync

	/* IBAT 5 */
	lis r4, CONFIG_SYS_IBAT5L@h
	ori r4, r4, CONFIG_SYS_IBAT5L@l
	lis r3, CONFIG_SYS_IBAT5U@h
	ori r3, r3, CONFIG_SYS_IBAT5U@l
	mtspr IBAT5L, r4
	mtspr IBAT5U, r3
	isync

	/* DBAT 5 */
	lis r4, CONFIG_SYS_DBAT5L@h
	ori r4, r4, CONFIG_SYS_DBAT5L@l
	lis r3, CONFIG_SYS_DBAT5U@h
	ori r3, r3, CONFIG_SYS_DBAT5U@l
	mtspr DBAT5L, r4
	mtspr DBAT5U, r3
	isync

	/* IBAT 6 */
	lis r4, CONFIG_SYS_IBAT6L_EARLY@h
	ori r4, r4, CONFIG_SYS_IBAT6L_EARLY@l
	lis r3, CONFIG_SYS_IBAT6U_EARLY@h
	ori r3, r3, CONFIG_SYS_IBAT6U_EARLY@l
	mtspr IBAT6L, r4
	mtspr IBAT6U, r3
	isync

	/* DBAT 6 */
	lis r4, CONFIG_SYS_DBAT6L_EARLY@h
	ori r4, r4, CONFIG_SYS_DBAT6L_EARLY@l
	lis r3, CONFIG_SYS_DBAT6U_EARLY@h
	ori r3, r3, CONFIG_SYS_DBAT6U_EARLY@l
	mtspr DBAT6L, r4
	mtspr DBAT6U, r3
	isync

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	/* IBAT 7 */
	lis r4, CONFIG_SYS_CCSR_DEFAULT_IBATL@h
	ori r4, r4, CONFIG_SYS_CCSR_DEFAULT_IBATL@l
	lis r3, CONFIG_SYS_CCSR_DEFAULT_IBATU@h
	ori r3, r3, CONFIG_SYS_CCSR_DEFAULT_IBATU@l
	mtspr IBAT7L, r4
	mtspr IBAT7U, r3
	isync

	/* DBAT 7 */
	lis r4, CONFIG_SYS_CCSR_DEFAULT_DBATL@h
	ori r4, r4, CONFIG_SYS_CCSR_DEFAULT_DBATL@l
	lis r3, CONFIG_SYS_CCSR_DEFAULT_DBATU@h
	ori r3, r3, CONFIG_SYS_CCSR_DEFAULT_DBATU@l
	mtspr DBAT7L, r4
	mtspr DBAT7U, r3
	isync
#endif
	blr

	.globl clear_tlbs
clear_tlbs:
	addis r3, 0, 0x0000
	addis r5, 0, 0x4
	isync
tlblp:
	tlbie r3
	sync
	addi r3, r3, 0x1000
	cmp 0, 0, r3, r5
	blt tlblp
	blr
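/*
 * clear_tlbs steps an effective address from 0 to 0x40000 in 4 KB
 * increments and issues tlbie for each page. The low EA bits index the
 * TLB sets, so this walk touches every congruence class and leaves both
 * I- and D-TLBs empty (the count assumes the usual e600/MPC86xx TLB
 * geometry of 64 sets).
 */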
	.globl disable_addr_trans
disable_addr_trans:
	/* disable address translation */
	mflr r4
	mfmsr r3
	andi. r0, r3, (MSR_IR | MSR_DR)
	beqlr
	andc r3, r3, r0
	mtspr SRR0, r4
	mtspr SRR1, r3
	rfi
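/*
 * Note: SRR0 is loaded with the saved link register, so the rfi above
 * behaves like a blr that simultaneously clears MSR[IR]/MSR[DR]; the
 * caller resumes with address translation disabled.
 */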
/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is pointer into trap frame, r1 has new stack pointer.
 */
	.globl transfer_to_handler
transfer_to_handler:
	stw r22,_NIP(r21)
	lis r22,MSR_POW@h
	andc r23,r23,r22
	stw r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr r23
	andi. r24,r23,0x3f00 /* get vector offset */
	stw r24,TRAP(r21)
	li r22,0
	stw r22,RESULT(r21)
	mtspr SPRG2,r22 /* r1 is now kernel sp */
	lwz r24,0(r23) /* virtual address of handler */
	lwz r23,4(r23) /* where to go when done */
	mtspr SRR0,r24
	mtspr SRR1,r20
	mtlr r23
	SYNC
	rfi /* jump to handler, enable MMU */
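/*
 * Note: r23 (loaded from LR above) points at a two-word table set up by
 * the exception prologue macros: word 0 holds the address of the C
 * handler and word 1 the address of int_return. The rfi therefore
 * enters the handler with the MMU enabled, and the handler later
 * returns through int_return below. trap_init relocates these table
 * entries when the vectors are copied to low memory.
 */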
int_return:
	mfmsr r28 /* Disable interrupts */
	li r4,0
	ori r4,r4,MSR_EE
	andc r28,r28,r4
	SYNC /* Some chip revs need this... */
	mtmsr r28
	SYNC
	lwz r2,_CTR(r1)
	lwz r0,_LINK(r1)
	mtctr r2
	mtlr r0
	lwz r2,_XER(r1)
	lwz r0,_CCR(r1)
	mtspr XER,r2
	mtcrf 0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz r2,_NIP(r1) /* Restore environment */
	lwz r0,_MSR(r1)
	mtspr SRR0,r2
	mtspr SRR1,r0
	lwz r0,GPR0(r1)
	lwz r2,GPR2(r1)
	lwz r1,GPR1(r1)
	SYNC
	rfi

	.globl dc_read
dc_read:
	blr

/*
 * Function: in8
 * Description: Input 8 bits
 */
	.globl in8
in8:
	lbz r3,0x0000(r3)
	blr

/*
 * Function: out8
 * Description: Output 8 bits
 */
	.globl out8
out8:
	stb r4,0x0000(r3)
	blr

/*
 * Function: out16
 * Description: Output 16 bits
 */
	.globl out16
out16:
	sth r4,0x0000(r3)
	blr

/*
 * Function: out16r
 * Description: Byte reverse and output 16 bits
 */
	.globl out16r
out16r:
	sthbrx r4,r0,r3
	blr

/*
 * Function: out32
 * Description: Output 32 bits
 */
	.globl out32
out32:
	stw r4,0x0000(r3)
	blr

/*
 * Function: out32r
 * Description: Byte reverse and output 32 bits
 */
	.globl out32r
out32r:
	stwbrx r4,r0,r3
	blr

/*
 * Function: in16
 * Description: Input 16 bits
 */
	.globl in16
in16:
	lhz r3,0x0000(r3)
	blr

/*
 * Function: in16r
 * Description: Input 16 bits and byte reverse
 */
	.globl in16r
in16r:
	lhbrx r3,r0,r3
	blr

/*
 * Function: in32
 * Description: Input 32 bits
 */
	.globl in32
in32:
	lwz r3,0x0000(r3)
	blr

/*
 * Function: in32r
 * Description: Input 32 bits and byte reverse
 */
	.globl in32r
in32r:
	lwbrx r3,r0,r3
	blr

/*
 * void relocate_code(addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cachelinesize
 */
	.globl relocate_code
relocate_code:
	mr r1, r3 /* Set new stack pointer */
	mr r9, r4 /* Save copy of Global Data pointer */
	mr r10, r5 /* Save copy of Destination Address */

	GET_GOT
	mr r3, r5 /* Destination Address */
	lis r4, CONFIG_SYS_MONITOR_BASE@h /* Source Address */
	ori r4, r4, CONFIG_SYS_MONITOR_BASE@l
	lwz r5, GOT(__init_end)
	sub r5, r5, r4
	li r6, CONFIG_SYS_CACHELINE_SIZE /* Cache Line Size */

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub r15, r10, r4

	/* First our own GOT */
	add r12, r12, r15
	/* then the one used by the C code */
	add r30, r30, r15

	/*
	 * Now relocate code
	 */
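	/*
	 * The copy below behaves like memmove: if the destination is
	 * above the source (cr1 "ge"), the image is copied backwards
	 * starting from the top so an overlapping region is not
	 * clobbered; otherwise it is copied forwards. The word count is
	 * rounded up from the byte length in r5.
	 */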
	cmplw cr1,r3,r4
	addi r0,r5,3
	srwi. r0,r0,2
	beq cr1,4f /* In place copy is not necessary */
	beq 7f /* Protect against 0 count */
	mtctr r0
	bge cr1,2f

	la r8,-4(r4)
	la r7,-4(r3)
1:	lwzu r0,4(r8)
	stwu r0,4(r7)
	bdnz 1b
	b 4f

2:	slwi r0,r0,2
	add r8,r4,r0
	add r7,r3,r0
3:	lwzu r0,-4(r8)
	stwu r0,-4(r7)
	bdnz 3b

	/*
	 * Now flush the cache: note that we must start from a cache aligned
	 * address. Otherwise we might miss one cache line.
	 */
4:	cmpwi r6,0
	add r5,r3,r5
	beq 7f /* Always flush prefetch queue in any case */
	subi r0,r6,1
	andc r3,r3,r0
	mr r4,r3
5:	dcbst 0,r4
	add r4,r4,r6
	cmplw r4,r5
	blt 5b
	sync /* Wait for all dcbst to complete on bus */
	mr r4,r3
6:	icbi 0,r4
	add r4,r4,r6
	cmplw r4,r5
	blt 6b
7:	sync /* Wait for all icbi to complete on bus */
	isync
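	/*
	 * The dcbst loop pushes the freshly copied image out of the data
	 * cache to memory, the sync waits for those stores to complete,
	 * and the icbi loop then invalidates any stale instruction-cache
	 * lines covering the destination. The final sync/isync make the
	 * new code visible before we branch into it.
	 */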
	/*
	 * We are done. Do not return, instead branch to second part of board
	 * initialization, now running from RAM.
	 */
	addi r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr r0
	blr

in_ram:
	/*
	 * Relocation Function: r12 points to got2+0x8000
	 *
	 * Adjust got2 pointers; there is no need to check for 0 because
	 * this code already puts a few entries in the table.
	 */
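	/*
	 * The GOT slot for _GOT2_TABLE_ sits at the start of the table
	 * and holds the table's link-time address, so subtracting the
	 * loaded value from the slot's current address (the sub into r11
	 * below) yields the relocation offset that is added to every
	 * non-zero entry.
	 */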
	li r0,__got2_entries@sectoff@l
	la r3,GOT(_GOT2_TABLE_)
	lwz r11,GOT(_GOT2_TABLE_)
	mtctr r0
	sub r11,r3,r11
	addi r3,r3,-4
1:	lwzu r0,4(r3)
	cmpwi r0,0
	beq- 2f
	add r0,r0,r11
	stw r0,0(r3)
2:	bdnz 1b

	/*
	 * Now adjust the fixups and the pointers to the fixups
	 * in case we need to move ourselves again.
	 */
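	/*
	 * Each fixup table entry is the (link-time) address of a word
	 * that itself contains an absolute address. The loop below
	 * rewrites the table entry to its relocated address and, if the
	 * word it points to is non-zero, relocates that word as well.
	 */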
	li r0,__fixup_entries@sectoff@l
	lwz r3,GOT(_FIXUP_TABLE_)
	cmpwi r0,0
	mtctr r0
	addi r3,r3,-4
	beq 4f
3:	lwzu r4,4(r3)
	lwzux r0,r4,r11
	cmpwi r0,0
	add r0,r0,r11
	stw r4,0(r3)
	beq- 5f
	stw r0,0(r4)
5:	bdnz 3b
4:

/* clear_bss: */
	/*
	 * Now clear BSS segment
	 */
	lwz r3,GOT(__bss_start)
	lwz r4,GOT(__bss_end)

	cmplw 0, r3, r4
	beq 6f

	li r0, 0
5:
	stw r0, 0(r3)
	addi r3, r3, 4
	cmplw 0, r3, r4
	bne 5b
6:
	mr r3, r9 /* Init Data pointer */
	mr r4, r10 /* Destination Address */
	bl board_init_r

	/* not reached - end relocate_code */
/*-----------------------------------------------------------------------*/

/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 */
	.globl trap_init
trap_init:
	mflr r4 /* save link register */
	GET_GOT
	lwz r7, GOT(_start)
	lwz r8, GOT(_end_of_vectors)

	li r9, 0x100 /* reset vector always at 0x100 */

	cmplw 0, r7, r8
	bgelr /* return if r7>=r8 - just in case */
1:
	lwz r0, 0(r7)
	stw r0, 0(r9)
	addi r7, r7, 4
	addi r9, r9, 4
	cmplw 0, r7, r8
	bne 1b

	/*
	 * relocate `hdlr' and `int_return' entries
	 */
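	/*
	 * Each vector copied above still contains the absolute
	 * handler/int_return addresses described at transfer_to_handler.
	 * The trap_reloc helper (from the common PowerPC trap code) is
	 * expected to patch those two words for one vector at a time;
	 * the loops below walk r7 across the vector offsets that need
	 * patching.
	 */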
	li r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl trap_reloc
	addi r7, r7, 0x100 /* next exception vector */
	cmplw 0, r7, r8
	blt 2b

	li r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl trap_reloc

	li r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl trap_reloc

	li r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl trap_reloc
	addi r7, r7, 0x100 /* next exception vector */
	cmplw 0, r7, r8
	blt 3b

	li r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl trap_reloc
	addi r7, r7, 0x100 /* next exception vector */
	cmplw 0, r7, r8
	blt 4b
	/* enable exceptions from RAM vectors */
	mfmsr r7
	li r8,MSR_IP
	andc r7,r7,r8
	ori r7,r7,MSR_ME /* Enable Machine Check */
	mtmsr r7

	mtlr r4 /* restore link register */
	blr

	.globl enable_ext_addr
enable_ext_addr:
	mfspr r0, HID0
	lis r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr HID0, r0
	sync
	isync
	blr

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	.globl setup_ccsrbar
setup_ccsrbar:
	/* Special sequence needed to update CCSRBAR itself */
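	/*
	 * The new CCSRBAR value is written through the old (default)
	 * mapping, then a load from an unrelated address
	 * (CONFIG_SYS_TEXT_BASE) and a load through the new CCSR virtual
	 * address are issued, bracketed by isync, so the store has taken
	 * effect before CCSR registers are touched again. This is a
	 * common update sequence for relocating CCSRBAR on these parts.
	 */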
	lis r4, CONFIG_SYS_CCSRBAR_DEFAULT@h
	ori r4, r4, CONFIG_SYS_CCSRBAR_DEFAULT@l

	lis r5, CONFIG_SYS_CCSRBAR_PHYS_LOW@h
	ori r5, r5, CONFIG_SYS_CCSRBAR_PHYS_LOW@l
	srwi r5,r5,12
	li r6, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l
	rlwimi r5,r6,20,8,11
	stw r5, 0(r4) /* Store physical value of CCSR */
	isync

	lis r5, CONFIG_SYS_TEXT_BASE@h
	ori r5,r5,CONFIG_SYS_TEXT_BASE@l
	lwz r5, 0(r5)
	isync

	/* Use VA of CCSR to do read */
	lis r3, CONFIG_SYS_CCSRBAR@h
	lwz r5, CONFIG_SYS_CCSRBAR@l(r3)
	isync

	blr
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
lock_ram_in_cache:
	/* Allocate Initial RAM in data cache. */
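	/*
	 * dcbz establishes each 32-byte cache line for the init-RAM
	 * region directly in the data cache and zeroes it, without
	 * needing working memory behind it; the region is then locked in
	 * the cache so it can serve as the early stack.
	 */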
	lis r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li r4, ((CONFIG_SYS_INIT_RAM_SIZE & ~31) + \
		(CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr r4
1:
	dcbz r0, r3
	addi r3, r3, 32
	bdnz 1b
#if 1
	/* Lock the data cache */
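	/*
	 * Note: the 0x1000 mask set below is the HID0[DLOCK] bit on
	 * e600-class cores, which locks the L1 data cache contents so
	 * the init-RAM lines established above cannot be evicted.
	 */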
	mfspr r0, HID0
	ori r0, r0, 0x1000
	sync
	mtspr HID0, r0
	sync
	blr
#endif
#if 0
	/* Lock the first way of the data cache */
	mfspr r0, LDSTCR
	ori r0, r0, 0x0080
#if defined(CONFIG_ALTIVEC)
	dssall
#endif
	sync
	mtspr LDSTCR, r0
	sync
	isync
	blr
#endif

	.globl unlock_ram_in_cache
unlock_ram_in_cache:
	/* invalidate the INIT_RAM section */
	lis r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li r4, ((CONFIG_SYS_INIT_RAM_SIZE & ~31) + \
		(CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr r4
1:	icbi r0, r3
	addi r3, r3, 32
	bdnz 1b
	sync /* Wait for all icbi to complete on bus */
	isync
#if 1
	/* Unlock the data cache and invalidate it */
	mfspr r0, HID0
	li r3,0x1000
	andc r0,r0,r3
	li r3,0x0400
	or r0,r0,r3
	sync
	mtspr HID0, r0
	sync
	blr
#endif
#if 0
	/* Unlock the first way of the data cache */
	mfspr r0, LDSTCR
	li r3,0x0080
	andc r0,r0,r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr LDSTCR, r0
	sync
	isync
	li r3,0x0400
	or r0,r0,r3
	sync
	mtspr HID0, r0
	sync
	blr
#endif
#endif