start.S
/*
 * Copyright (C) 1998 Dan Malek <dmalek@jlc.net>
 * Copyright (C) 1999 Magnus Damm <kieraypc01.p.y.kie.era.ericsson.se>
 * Copyright (C) 2000 - 2003 Wolfgang Denk <wd@denx.de>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

/*
 * U-Boot - Startup Code for MPC5xxx CPUs
 */

#include <asm-offsets.h>
#include <config.h>
#include <mpc5xxx.h>
#include <timestamp.h>
#include <version.h>

#define CONFIG_MPC5xxx 1	/* needed for Linux kernel header files */
#define _LINUX_CONFIG_H 1	/* avoid reading Linux autoconf.h file */

#include <ppc_asm.tmpl>
#include <ppc_defs.h>
#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/u-boot.h>

#ifndef CONFIG_IDENT_STRING
#define CONFIG_IDENT_STRING ""
#endif

/* We don't want the MMU yet.
 */
#undef MSR_KERNEL

/* Floating Point enable, Machine Check and Recoverable Interr. */
#ifdef DEBUG
#define MSR_KERNEL (MSR_FP|MSR_RI)
#else
#define MSR_KERNEL (MSR_FP|MSR_ME|MSR_RI)
#endif

/*
 * Set up GOT: Global Offset Table
 *
 * Use r12 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(_end)
	GOT_ENTRY(__bss_start)
	END_GOT
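/*
 * Note: the table above gives position-independent access to these
 * linker symbols via r12 (see GET_GOT), both before and after the
 * monitor relocates itself to RAM.
 */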
/*
 * Version string
 */
	.data
	.globl	version_string
version_string:
	.ascii U_BOOT_VERSION
	.ascii " (", U_BOOT_DATE, " - ", U_BOOT_TIME, ")"
	.ascii CONFIG_IDENT_STRING, "\0"

/*
 * Exception vectors
 */
	.text
	. = EXC_OFF_SYS_RESET
	.globl	_start
_start:
	mfmsr	r5			/* save msr contents */

	/* Move CSBoot and adjust instruction pointer */
	/*--------------------------------------------------------------*/
#if defined(CONFIG_SYS_LOWBOOT)
# if defined(CONFIG_SYS_RAMBOOT)
# error CONFIG_SYS_LOWBOOT is incompatible with CONFIG_SYS_RAMBOOT
# endif /* CONFIG_SYS_RAMBOOT */
	lis	r4, CONFIG_SYS_DEFAULT_MBAR@h

	lis	r3, START_REG(CONFIG_SYS_BOOTCS_START)@h
	ori	r3, r3, START_REG(CONFIG_SYS_BOOTCS_START)@l
	stw	r3, 0x4(r4)		/* CS0 start */

	lis	r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@h
	ori	r3, r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@l
	stw	r3, 0x8(r4)		/* CS0 stop */

	lis	r3, 0x02010000@h
	ori	r3, r3, 0x02010000@l
	stw	r3, 0x54(r4)		/* CS0 and Boot enable */

	lis	r3, lowboot_reentry@h	/* jump from bootlow address space (0x0000xxxx) */
	ori	r3, r3, lowboot_reentry@l /* to the address space the linker used */
	mtlr	r3
	blr

lowboot_reentry:
	lis	r3, START_REG(CONFIG_SYS_BOOTCS_START)@h
	ori	r3, r3, START_REG(CONFIG_SYS_BOOTCS_START)@l
	stw	r3, 0x4c(r4)		/* Boot start */

	lis	r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@h
	ori	r3, r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@l
	stw	r3, 0x50(r4)		/* Boot stop */

	lis	r3, 0x02000001@h
	ori	r3, r3, 0x02000001@l
	stw	r3, 0x54(r4)		/* Boot enable, CS0 disable */
#endif	/* CONFIG_SYS_LOWBOOT */

#if defined(CONFIG_SYS_DEFAULT_MBAR) && !defined(CONFIG_SYS_RAMBOOT)
	lis	r3, CONFIG_SYS_MBAR@h
	ori	r3, r3, CONFIG_SYS_MBAR@l
	/* MBAR is mirrored into the MBAR SPR */
	mtspr	MBAR,r3
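	/*
	 * The memory-mapped MBAR register at the old (default) base takes
	 * the upper 16 address bits of the new base in its low half; the
	 * rlwinm below extracts them before they are stored there.
	 */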
	rlwinm	r3, r3, 16, 16, 31
	lis	r4, CONFIG_SYS_DEFAULT_MBAR@h
	stw	r3, 0(r4)
#endif /* CONFIG_SYS_DEFAULT_MBAR */

	/* Initialise the MPC5xxx processor core */
	/*--------------------------------------------------------------*/
	bl	init_5xxx_core

	/* initialize some things that are hard to access from C */
	/*--------------------------------------------------------------*/
	/* set up stack in on-chip SRAM */
	lis	r3, CONFIG_SYS_INIT_RAM_ADDR@h
	ori	r3, r3, CONFIG_SYS_INIT_RAM_ADDR@l
	ori	r1, r3, CONFIG_SYS_INIT_SP_OFFSET
	li	r0, 0			/* Make room for stack frame header and */
	stwu	r0, -4(r1)		/* clear final stack frame so that */
	stwu	r0, -4(r1)		/* stack backtraces terminate cleanly */

	/* let the C-code set up the rest */
	/*				  */
	/* Be careful to keep code relocatable ! */
	/*--------------------------------------------------------------*/
	GET_GOT			/* initialize GOT access */

	/* r3: IMMR */
	bl	cpu_init_f	/* run low-level CPU init code (in Flash) */

	bl	board_init_f	/* run 1st part of board init code (in Flash) */

	/* NOTREACHED - board_init_f() does not return */
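	/*
	 * board_init_f() runs on the small SRAM stack set up above and, in
	 * this generation of U-Boot, eventually calls relocate_code() below
	 * to copy the monitor to RAM and continue in board_init_r().
	 */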
/*
 * Vector Table
 */
	.globl	_start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG(SRR0, SRR1)
	mfspr	r4,DAR
	stw	r4,_DAR(r21)
	mfspr	r5,DSISR
	stw	r5,_DSISR(r21)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE)

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG(SRR0, SRR1)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException,
		MSR_KERNEL, COPY_EE)

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)

	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)
	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)
	STD_EXCEPTION(0x1000, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1100, DataLoadTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataStoreTLBMiss, UnknownException)

#ifdef DEBUG
	. = 0x1300
	/*
	 * This exception occurs when the program counter matches the
	 * Instruction Address Breakpoint Register (IABR).
	 *
	 * I want the cpu to halt if this occurs so I can hunt around
	 * with the debugger and look at things.
	 *
	 * When DEBUG is defined, both machine check enable (in the MSR)
	 * and checkstop reset enable (in the reset mode register) are
	 * turned off and so a checkstop condition will result in the cpu
	 * halting.
	 *
	 * I force the cpu into a checkstop condition by putting an illegal
	 * instruction here (at least this is the theory).
	 *
	 * well - that didn't work, so just do an infinite loop!
	 */
1:	b	1b
#else
	STD_EXCEPTION(0x1300, InstructionBreakpoint, DebugException)
#endif

	STD_EXCEPTION(0x1400, SMI, UnknownException)

	STD_EXCEPTION(0x1500, Trap_15, UnknownException)
	STD_EXCEPTION(0x1600, Trap_16, UnknownException)
	STD_EXCEPTION(0x1700, Trap_17, UnknownException)
	STD_EXCEPTION(0x1800, Trap_18, UnknownException)
	STD_EXCEPTION(0x1900, Trap_19, UnknownException)
	STD_EXCEPTION(0x1a00, Trap_1a, UnknownException)
	STD_EXCEPTION(0x1b00, Trap_1b, UnknownException)
	STD_EXCEPTION(0x1c00, Trap_1c, UnknownException)
	STD_EXCEPTION(0x1d00, Trap_1d, UnknownException)
	STD_EXCEPTION(0x1e00, Trap_1e, UnknownException)
	STD_EXCEPTION(0x1f00, Trap_1f, UnknownException)
	STD_EXCEPTION(0x2000, Trap_20, UnknownException)
	STD_EXCEPTION(0x2100, Trap_21, UnknownException)
	STD_EXCEPTION(0x2200, Trap_22, UnknownException)
	STD_EXCEPTION(0x2300, Trap_23, UnknownException)
	STD_EXCEPTION(0x2400, Trap_24, UnknownException)
	STD_EXCEPTION(0x2500, Trap_25, UnknownException)
	STD_EXCEPTION(0x2600, Trap_26, UnknownException)
	STD_EXCEPTION(0x2700, Trap_27, UnknownException)
	STD_EXCEPTION(0x2800, Trap_28, UnknownException)
	STD_EXCEPTION(0x2900, Trap_29, UnknownException)
	STD_EXCEPTION(0x2a00, Trap_2a, UnknownException)
	STD_EXCEPTION(0x2b00, Trap_2b, UnknownException)
	STD_EXCEPTION(0x2c00, Trap_2c, UnknownException)
	STD_EXCEPTION(0x2d00, Trap_2d, UnknownException)
	STD_EXCEPTION(0x2e00, Trap_2e, UnknownException)
	STD_EXCEPTION(0x2f00, Trap_2f, UnknownException)

	.globl	_end_of_vectors
_end_of_vectors:

	. = 0x3000

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is a pointer into the trap frame; r1 has the new stack pointer.
 */
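/*
 * On entry (as set up by the EXCEPTION_PROLOG/EXC_XFER_TEMPLATE macros
 * in ppc_asm.tmpl): r22/r23 hold the saved SRR0/SRR1, r20 holds the MSR
 * value the handler should run with, and the link register points at a
 * pair of words giving the handler address and the address to continue
 * at afterwards (int_return).
 */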
	.globl	transfer_to_handler
transfer_to_handler:
	stw	r22,_NIP(r21)
	lis	r22,MSR_POW@h
	andc	r23,r23,r22
	stw	r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr	r23
	andi.	r24,r23,0x3f00		/* get vector offset */
	stw	r24,TRAP(r21)
	li	r22,0
	stw	r22,RESULT(r21)
	lwz	r24,0(r23)		/* virtual address of handler */
	lwz	r23,4(r23)		/* where to go when done */
	mtspr	SRR0,r24
	mtspr	SRR1,r20
	mtlr	r23
	SYNC
	rfi				/* jump to handler, enable MMU */
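/*
 * int_return: restore the complete register set from the exception
 * frame and return with rfi. External interrupts are masked in the
 * MSR first so the restore sequence cannot itself be interrupted.
 */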
int_return:
	mfmsr	r28		/* Disable interrupts */
	li	r4,0
	ori	r4,r4,MSR_EE
	andc	r28,r28,r4
	SYNC			/* Some chip revs need this... */
	mtmsr	r28
	SYNC
	lwz	r2,_CTR(r1)
	lwz	r0,_LINK(r1)
	mtctr	r2
	mtlr	r0
	lwz	r2,_XER(r1)
	lwz	r0,_CCR(r1)
	mtspr	XER,r2
	mtcrf	0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz	r2,_NIP(r1)	/* Restore environment */
	lwz	r0,_MSR(r1)
	mtspr	SRR0,r2
	mtspr	SRR1,r0
	lwz	r0,GPR0(r1)
	lwz	r2,GPR2(r1)
	lwz	r1,GPR1(r1)
	SYNC
	rfi

/*
 * This code initialises the MPC5xxx processor core
 * (conforms to PowerPC 603e spec)
 * Note: expects original MSR contents to be in r5.
 */
	.globl	init_5xxx_core
init_5xxx_core:
	/* Initialize machine status; enable machine check interrupt */
	/*--------------------------------------------------------------*/
	li	r3, MSR_KERNEL		/* Set ME and RI flags */
	rlwimi	r3, r5, 0, 25, 25	/* preserve IP bit set by HRCW */
#ifdef DEBUG
	rlwimi	r3, r5, 0, 21, 22	/* debugger might set SE & BE bits */
#endif
	SYNC				/* Some chip revs need this... */
	mtmsr	r3
	SYNC
	mtspr	SRR1, r3		/* Make SRR1 match MSR */

	/* Initialize the Hardware Implementation-dependent Registers */
	/* HID0 also contains cache control */
	/*--------------------------------------------------------------*/
	lis	r3, CONFIG_SYS_HID0_INIT@h
	ori	r3, r3, CONFIG_SYS_HID0_INIT@l
	SYNC
	mtspr	HID0, r3

	lis	r3, CONFIG_SYS_HID0_FINAL@h
	ori	r3, r3, CONFIG_SYS_HID0_FINAL@l
	SYNC
	mtspr	HID0, r3
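	/*
	 * HID0 is written twice on purpose: boards typically use
	 * CONFIG_SYS_HID0_INIT to flash-invalidate the caches and
	 * CONFIG_SYS_HID0_FINAL for the value that should stay in effect;
	 * the exact bits are board configuration choices.
	 */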
	/* clear all BAT's */
	/*--------------------------------------------------------------*/
	li	r0, 0
	mtspr	DBAT0U, r0
	mtspr	DBAT0L, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT1L, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT2L, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT3L, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT4L, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT5L, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT6L, r0
	mtspr	DBAT7U, r0
	mtspr	DBAT7L, r0
	mtspr	IBAT0U, r0
	mtspr	IBAT0L, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT1L, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT2L, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT3L, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT4L, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT5L, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT6L, r0
	mtspr	IBAT7U, r0
	mtspr	IBAT7L, r0
	SYNC

	/* invalidate all tlb's */
	/* */
	/* From the 603e User Manual: "The 603e provides the ability to */
	/* invalidate a TLB entry. The TLB Invalidate Entry (tlbie) */
	/* instruction invalidates the TLB entry indexed by the EA, and */
	/* operates on both the instruction and data TLBs simultaneously */
	/* invalidating four TLB entries (both sets in each TLB). The */
	/* index corresponds to bits 15-19 of the EA. To invalidate all */
	/* entries within both TLBs, 32 tlbie instructions should be */
	/* issued, incrementing this field by one each time." */
	/* */
	/* "Note that the tlbia instruction is not implemented on the */
	/* 603e." */
	/* */
	/* bits 15-19 correspond to addresses 0x00000000 to 0x0001F000 */
	/* incrementing by 0x1000 each time. The code below is sort of */
	/* based on code in "flush_tlbs" from arch/powerpc/kernel/head.S */
	/* */
	/*--------------------------------------------------------------*/
	li	r3, 32
	mtctr	r3
	li	r3, 0
1:	tlbie	r3
	addi	r3, r3, 0x1000
	bdnz	1b
	SYNC

	/* Done! */
	/*--------------------------------------------------------------*/
	blr

/* Cache functions.
 *
 * Note: requires that all cache bits in
 * HID0 are in the low half word.
 */
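/*
 * The enable/disable routines below write HID0 twice: first with the
 * flash-invalidate bit (ICFI/DCI) set together with the new enable and
 * lock state, then again with the invalidate bit cleared, since that
 * bit must not be left set during normal operation.
 */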
	.globl	icache_enable
icache_enable:
	mfspr	r3, HID0
	ori	r3, r3, HID0_ICE
	lis	r4, 0
	ori	r4, r4, HID0_ILOCK
	andc	r3, r3, r4
	ori	r4, r3, HID0_ICFI
	isync
	mtspr	HID0, r4	/* sets enable and invalidate, clears lock */
	isync
	mtspr	HID0, r3	/* clears invalidate */
	blr

	.globl	icache_disable
icache_disable:
	mfspr	r3, HID0
	lis	r4, 0
	ori	r4, r4, HID0_ICE|HID0_ILOCK
	andc	r3, r3, r4
	ori	r4, r3, HID0_ICFI
	isync
	mtspr	HID0, r4	/* sets invalidate, clears enable and lock */
	isync
	mtspr	HID0, r3	/* clears invalidate */
	blr

	.globl	icache_status
icache_status:
	mfspr	r3, HID0
	rlwinm	r3, r3, HID0_ICE_BITPOS + 1, 31, 31
	blr

	.globl	dcache_enable
dcache_enable:
	mfspr	r3, HID0
	ori	r3, r3, HID0_DCE
	lis	r4, 0
	ori	r4, r4, HID0_DLOCK
	andc	r3, r3, r4
	ori	r4, r3, HID0_DCI
	sync
	mtspr	HID0, r4	/* sets enable and invalidate, clears lock */
	sync
	mtspr	HID0, r3	/* clears invalidate */
	blr

	.globl	dcache_disable
dcache_disable:
	mfspr	r3, HID0
	lis	r4, 0
	ori	r4, r4, HID0_DCE|HID0_DLOCK
	andc	r3, r3, r4
	ori	r4, r3, HID0_DCI
	sync
	mtspr	HID0, r4	/* sets invalidate, clears enable and lock */
	sync
	mtspr	HID0, r3	/* clears invalidate */
	blr

	.globl	dcache_status
dcache_status:
	mfspr	r3, HID0
	rlwinm	r3, r3, HID0_DCE_BITPOS + 1, 31, 31
	blr

	.globl	get_svr
get_svr:
	mfspr	r3, SVR
	blr

	.globl	get_pvr
get_pvr:
	mfspr	r3, PVR
	blr

/*------------------------------------------------------------------------------*/

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return; instead, it continues in RAM
 * after relocating the monitor code.
 *
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cachelinesize
 */
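/*
 * Note: the r3..r6 roles above describe the copy loop set up below. On
 * entry the C arguments arrive as r3 = addr_sp (new stack pointer),
 * r4 = gd (global data pointer) and r5 = addr_moni (relocation
 * destination), as the first three instructions show.
 */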
	.globl	relocate_code
relocate_code:
	mr	r1, r3		/* Set new stack pointer */
	mr	r9, r4		/* Save copy of Global Data pointer */
	mr	r10, r5		/* Save copy of Destination Address */

	GET_GOT
	mr	r3, r5				/* Destination Address */
	lis	r4, CONFIG_SYS_MONITOR_BASE@h	/* Source Address */
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE@l
	lwz	r5, GOT(__init_end)
	sub	r5, r5, r4
	li	r6, CONFIG_SYS_CACHELINE_SIZE	/* Cache Line Size */

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub	r15, r10, r4

	/* First our own GOT */
	add	r12, r12, r15
	/* then the one used by the C code */
	add	r30, r30, r15

	/*
	 * Now relocate code
	 */
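	/*
	 * Copy forward when the destination is below the source and
	 * backward when it is above, so overlapping regions are handled
	 * correctly; when source and destination are equal the copy is
	 * skipped entirely.
	 */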
	cmplw	cr1,r3,r4
	addi	r0,r5,3
	srwi.	r0,r0,2
	beq	cr1,4f		/* In place copy is not necessary */
	beq	7f		/* Protect against 0 count */
	mtctr	r0
	bge	cr1,2f

	la	r8,-4(r4)
	la	r7,-4(r3)
1:	lwzu	r0,4(r8)
	stwu	r0,4(r7)
	bdnz	1b
	b	4f

2:	slwi	r0,r0,2
	add	r8,r4,r0
	add	r7,r3,r0
3:	lwzu	r0,-4(r8)
	stwu	r0,-4(r7)
	bdnz	3b

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
4:	cmpwi	r6,0
	add	r5,r3,r5
	beq	7f		/* Always flush prefetch queue in any case */
	subi	r0,r6,1
	andc	r3,r3,r0
	mfspr	r7,HID0		/* don't do dcbst if dcache is disabled */
	rlwinm	r7,r7,HID0_DCE_BITPOS+1,31,31
	cmpwi	r7,0
	beq	9f
	mr	r4,r3
5:	dcbst	0,r4
	add	r4,r4,r6
	cmplw	r4,r5
	blt	5b
	sync			/* Wait for all dcbst to complete on bus */
9:	mfspr	r7,HID0		/* don't do icbi if icache is disabled */
	rlwinm	r7,r7,HID0_ICE_BITPOS+1,31,31
	cmpwi	r7,0
	beq	7f
	mr	r4,r3
6:	icbi	0,r4
	add	r4,r4,r6
	cmplw	r4,r5
	blt	6b
7:	sync			/* Wait for all icbi to complete on bus */
	isync

/*
 * We are done. Do not return; instead, branch to the second part of the
 * board initialization, now running from RAM.
 */
	addi	r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr	r0
	blr

in_ram:
/*
 * Relocation function, r12 points to got2+0x8000
 *
 * Adjust got2 pointers; there is no need to check for 0 because this
 * code already puts a few entries in the table.
 */
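	/*
	 * Because GOT_ENTRY(_GOT2_TABLE_) is the first slot of the table,
	 * the difference between that slot's run-time address (r3) and its
	 * link-time contents (r11) gives the relocation offset that is
	 * added to every non-zero got2 entry below.
	 */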
	li	r0,__got2_entries@sectoff@l
	la	r3,GOT(_GOT2_TABLE_)
	lwz	r11,GOT(_GOT2_TABLE_)
	mtctr	r0
	sub	r11,r3,r11
	addi	r3,r3,-4
1:	lwzu	r0,4(r3)
	cmpwi	r0,0
	beq-	2f
	add	r0,r0,r11
	stw	r0,0(r3)
2:	bdnz	1b

/*
 * Now adjust the fixups and the pointers to the fixups
 * in case we need to move ourselves again.
 */
	li	r0,__fixup_entries@sectoff@l
	lwz	r3,GOT(_FIXUP_TABLE_)
	cmpwi	r0,0
	mtctr	r0
	addi	r3,r3,-4
	beq	4f
3:	lwzu	r4,4(r3)
	lwzux	r0,r4,r11
	cmpwi	r0,0
	add	r0,r0,r11
	stw	r10,0(r3)
	beq-	5f
	stw	r0,0(r4)
5:	bdnz	3b
4:
clear_bss:
	/*
	 * Now clear BSS segment
	 */
	lwz	r3,GOT(__bss_start)
	lwz	r4,GOT(_end)

	cmplw	0, r3, r4
	beq	6f

	li	r0, 0
5:
	stw	r0, 0(r3)
	addi	r3, r3, 4
	cmplw	0, r3, r4
	bne	5b

6:
	mr	r3, r9		/* Global Data pointer */
	mr	r4, r10		/* Destination Address */
	bl	board_init_r
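	/* NOTREACHED - board_init_r() does not return */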
/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 */
	.globl	trap_init
trap_init:
	mflr	r4		/* save link register */
	GET_GOT
	lwz	r7, GOT(_start)
	lwz	r8, GOT(_end_of_vectors)

	li	r9, 0x100	/* reset vector always at 0x100 */

	cmplw	0, r7, r8
	bgelr			/* return if r7>=r8 - just in case */
1:
	lwz	r0, 0(r7)
	stw	r0, 0(r9)
	addi	r7, r7, 4
	addi	r9, r9, 4
	cmplw	0, r7, r8
	bne	1b

/*
 * relocate `hdlr' and `int_return' entries
 */
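	/*
	 * The .L_<name> symbols mark the handler/return-address words that
	 * EXC_XFER_TEMPLATE emits for each vector; trap_reloc() (defined
	 * elsewhere) is expected to add the relocation offset to that pair
	 * so the copied vectors branch to the right place (see ppc_asm.tmpl).
	 */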
	li	r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li	r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector */
	cmplw	0, r7, r8
	blt	2b

	li	r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li	r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector */
	cmplw	0, r7, r8
	blt	3b

	li	r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li	r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector */
	cmplw	0, r7, r8
	blt	4b

	mfmsr	r3			/* now that the vectors have */
	lis	r7, MSR_IP@h		/* relocated into low memory */
	ori	r7, r7, MSR_IP@l	/* MSR[IP] can be turned off */
	andc	r3, r3, r7		/* (if it was on) */
	SYNC				/* Some chip revs need this... */
	mtmsr	r3
	SYNC

	mtlr	r4			/* restore link register */
	blr