/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#include <sbi/riscv_asm.h>
#include <sbi/riscv_encoding.h>
#include <sbi/riscv_elf.h>
#include <sbi/sbi_platform.h>
#include <sbi/sbi_scratch.h>
#include <sbi/sbi_trap.h>

#define BOOT_STATUS_RELOCATE_DONE	1
#define BOOT_STATUS_BOOT_HART_DONE	2

	.macro MOV_3R __d0, __s0, __d1, __s1, __d2, __s2
	add \__d0, \__s0, zero
	add \__d1, \__s1, zero
	add \__d2, \__s2, zero
	.endm

	.macro MOV_5R __d0, __s0, __d1, __s1, __d2, __s2, __d3, __s3, __d4, __s4
	add \__d0, \__s0, zero
	add \__d1, \__s1, zero
	add \__d2, \__s2, zero
	add \__d3, \__s3, zero
	add \__d4, \__s4, zero
	.endm

	/*
	 * If __start_reg <= __check_reg and __check_reg < __end_reg then
	 * jump to __jump_label
	 */
	.macro BRANGE __start_reg, __end_reg, __check_reg, __jump_label
	blt \__check_reg, \__start_reg, 999f
	bge \__check_reg, \__end_reg, 999f
	j \__jump_label
999:
	.endm
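	/*
	 * For example, "BRANGE t2, t1, t3, _start_hang" (as used by the
	 * relocation code below) jumps to _start_hang when t2 <= t3 < t1.
	 */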
	.section .entry, "ax", %progbits
	.align 3
	.globl _start
	.globl _start_warm
_start:
	/* Find preferred boot HART id */
	MOV_3R s0, a0, s1, a1, s2, a2
	call fw_boot_hart
	add a6, a0, zero
	MOV_3R a0, s0, a1, s1, a2, s2
	li a7, -1
	beq a6, a7, _try_lottery
	/* Jump to relocation wait loop if we are not boot hart */
	bne a0, a6, _wait_relocate_copy_done
_try_lottery:
	/* Jump to relocation wait loop if we don't get relocation lottery */
	lla a6, _relocate_lottery
	li a7, 1
	amoadd.w a6, a7, (a6)
	bnez a6, _wait_relocate_copy_done

	/* Save load address */
	lla t0, _load_start
	lla t1, _fw_start
	REG_S t1, 0(t0)

#ifdef FW_PIC
	/* relocate the global table content */
	lla t0, _link_start
	REG_L t0, 0(t0)
	/* t1 shall hold the address of _fw_start */
	sub t2, t1, t0
	lla t3, _runtime_offset
	REG_S t2, (t3)
	lla t0, __rel_dyn_start
	lla t1, __rel_dyn_end
	beq t0, t1, _relocate_done
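	/*
	 * Each .rela.dyn entry is {r_offset, r_info, r_addend}, one
	 * REGBYTES-sized word each. Jump to 5f first so that t0 always
	 * points just past the entry whose fields are then read back at
	 * negative offsets.
	 */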
	j 5f
2:
	REG_L t5, -(REGBYTES*2)(t0)	/* t5 <-- relocation info:type */
	li t3, R_RISCV_RELATIVE		/* reloc type R_RISCV_RELATIVE */
	bne t5, t3, 3f
	REG_L t3, -(REGBYTES*3)(t0)
	REG_L t5, -(REGBYTES)(t0)	/* t5 <-- addend */
	add t5, t5, t2
	add t3, t3, t2
	REG_S t5, 0(t3)			/* store runtime address to the GOT entry */
	j 5f
3:
	lla t4, __dyn_sym_start
4:
	REG_L t5, -(REGBYTES*2)(t0)	/* t5 <-- relocation info:type */
	srli t6, t5, SYM_INDEX		/* t6 <-- symbol table index */
	andi t5, t5, 0xFF		/* t5 <-- relocation type */
	li t3, RELOC_TYPE
	bne t5, t3, 5f
	/* address R_RISCV_64 or R_RISCV_32 cases */
	REG_L t3, -(REGBYTES*3)(t0)
	li t5, SYM_SIZE
	mul t6, t6, t5
	add s5, t4, t6
	REG_L t6, -(REGBYTES)(t0)	/* t6 <-- addend */
	REG_L t5, REGBYTES(s5)
	add t5, t5, t6
	add t5, t5, t2			/* t5 <-- location to fix up in RAM */
	add t3, t3, t2			/* t3 <-- location to fix up in RAM */
	REG_S t5, 0(t3)			/* store runtime address to the variable */
5:
	addi t0, t0, (REGBYTES*3)
	ble t0, t1, 2b
	j _relocate_done
_wait_relocate_copy_done:
	j _wait_for_boot_hart
#else
	/* Relocate if load address != link address */
_relocate:
	lla t0, _link_start
	REG_L t0, 0(t0)
	lla t1, _link_end
	REG_L t1, 0(t1)
	lla t2, _load_start
	REG_L t2, 0(t2)
	sub t3, t1, t0
	add t3, t3, t2
	beq t0, t2, _relocate_done
	lla t4, _relocate_done
	sub t4, t4, t2
	add t4, t4, t0
	blt t2, t0, _relocate_copy_to_upper
_relocate_copy_to_lower:
	ble t1, t2, _relocate_copy_to_lower_loop
	lla t3, _relocate_lottery
	BRANGE t2, t1, t3, _start_hang
	lla t3, _boot_status
	BRANGE t2, t1, t3, _start_hang
	lla t3, _relocate
	lla t5, _relocate_done
	BRANGE t2, t1, t3, _start_hang
	BRANGE t2, t1, t5, _start_hang
	BRANGE t3, t5, t2, _start_hang
_relocate_copy_to_lower_loop:
	REG_L t3, 0(t2)
	REG_S t3, 0(t0)
	add t0, t0, __SIZEOF_POINTER__
	add t2, t2, __SIZEOF_POINTER__
	blt t0, t1, _relocate_copy_to_lower_loop
	jr t4
_relocate_copy_to_upper:
	ble t3, t0, _relocate_copy_to_upper_loop
	lla t2, _relocate_lottery
	BRANGE t0, t3, t2, _start_hang
	lla t2, _boot_status
	BRANGE t0, t3, t2, _start_hang
	lla t2, _relocate
	lla t5, _relocate_done
	BRANGE t0, t3, t2, _start_hang
	BRANGE t0, t3, t5, _start_hang
	BRANGE t2, t5, t0, _start_hang
_relocate_copy_to_upper_loop:
	add t3, t3, -__SIZEOF_POINTER__
	add t1, t1, -__SIZEOF_POINTER__
	REG_L t2, 0(t3)
	REG_S t2, 0(t1)
	blt t0, t1, _relocate_copy_to_upper_loop
	jr t4
_wait_relocate_copy_done:
	lla t0, _fw_start
	lla t1, _link_start
	REG_L t1, 0(t1)
	beq t0, t1, _wait_for_boot_hart
	lla t2, _boot_status
	lla t3, _wait_for_boot_hart
	sub t3, t3, t0
	add t3, t3, t1
1:
	/* waiting for relocate copy done (_boot_status == 1) */
	li t4, BOOT_STATUS_RELOCATE_DONE
	REG_L t5, 0(t2)
	/* Reduce the bus traffic so that boot hart may proceed faster */
	nop
	nop
	nop
	bgt t4, t5, 1b
	jr t3
#endif
_relocate_done:
	/*
	 * Mark relocate copy done
	 * Use _boot_status copy relative to the load address
	 */
	lla t0, _boot_status
#ifndef FW_PIC
	lla t1, _link_start
	REG_L t1, 0(t1)
	lla t2, _load_start
	REG_L t2, 0(t2)
	sub t0, t0, t1
	add t0, t0, t2
#endif
	li t1, BOOT_STATUS_RELOCATE_DONE
	REG_S t1, 0(t0)
	fence rw, rw

	/* At this point we are running from link address */
	/* Reset all registers for boot HART */
	li ra, 0
	call _reset_regs

	/* Zero-out BSS */
	lla s4, _bss_start
	lla s5, _bss_end
_bss_zero:
	REG_S zero, (s4)
	add s4, s4, __SIZEOF_POINTER__
	blt s4, s5, _bss_zero

	/* Setup temporary trap handler */
	lla s4, _start_hang
	csrw CSR_MTVEC, s4

	/* Setup temporary stack */
	lla s4, _fw_end
	li s5, (SBI_SCRATCH_SIZE * 2)
	add sp, s4, s5

	/* Allow main firmware to save info */
	MOV_5R s0, a0, s1, a1, s2, a2, s3, a3, s4, a4
	call fw_save_info
	MOV_5R a0, s0, a1, s1, a2, s2, a3, s3, a4, s4

#ifdef FW_FDT_PATH
	/* Override previous arg1 */
	lla a1, fw_fdt_bin
#endif

	/*
	 * Initialize platform
	 * Note: The a0 to a4 registers passed to the
	 * firmware are parameters to this function.
	 */
	MOV_5R s0, a0, s1, a1, s2, a2, s3, a3, s4, a4
	call fw_platform_init
	add t0, a0, zero
	MOV_5R a0, s0, a1, s1, a2, s2, a3, s3, a4, s4
	add a1, t0, zero

	/* Preload HART details
	 * s7 -> HART Count
	 * s8 -> HART Stack Size
	 */
	lla a4, platform
#if __riscv_xlen == 64
	lwu s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
	lwu s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#else
	lw s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
	lw s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#endif

	/* Setup scratch space for all the HARTs */
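	/*
	 * Layout (as computed below): one s8-sized stack per HART is
	 * carved out right after the firmware image, and each HART's
	 * scratch space (SBI_SCRATCH_SIZE bytes) lives at the top of
	 * its own stack.
	 */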
	lla tp, _fw_end
	mul a5, s7, s8
	add tp, tp, a5
	/* Keep a copy of tp */
	add t3, tp, zero
	/* Counter */
	li t2, 1
	/* hartid 0 is mandated by ISA */
	li t1, 0
_scratch_init:
	/*
	 * The following registers hold values that are computed before
	 * entering this block, and should remain unchanged.
	 *
	 * t3 -> the firmware end address
	 * s7 -> HART count
	 * s8 -> HART stack size
	 */
	add tp, t3, zero
	mul a5, s8, t1
	sub tp, tp, a5
	li a5, SBI_SCRATCH_SIZE
	sub tp, tp, a5

	/* Initialize scratch space */
	/* Store fw_start and fw_size in scratch space */
	lla a4, _fw_start
	sub a5, t3, a4
	REG_S a4, SBI_SCRATCH_FW_START_OFFSET(tp)
	REG_S a5, SBI_SCRATCH_FW_SIZE_OFFSET(tp)
	/* Store next arg1 in scratch space */
	MOV_3R s0, a0, s1, a1, s2, a2
	call fw_next_arg1
	REG_S a0, SBI_SCRATCH_NEXT_ARG1_OFFSET(tp)
	MOV_3R a0, s0, a1, s1, a2, s2
	/* Store next address in scratch space */
	MOV_3R s0, a0, s1, a1, s2, a2
	call fw_next_addr
	REG_S a0, SBI_SCRATCH_NEXT_ADDR_OFFSET(tp)
	MOV_3R a0, s0, a1, s1, a2, s2
	/* Store next mode in scratch space */
	MOV_3R s0, a0, s1, a1, s2, a2
	call fw_next_mode
	REG_S a0, SBI_SCRATCH_NEXT_MODE_OFFSET(tp)
	MOV_3R a0, s0, a1, s1, a2, s2
	/* Store warm_boot address in scratch space */
	lla a4, _start_warm
	REG_S a4, SBI_SCRATCH_WARMBOOT_ADDR_OFFSET(tp)
	/* Store platform address in scratch space */
	lla a4, platform
	REG_S a4, SBI_SCRATCH_PLATFORM_ADDR_OFFSET(tp)
	/* Store hartid-to-scratch function address in scratch space */
	lla a4, _hartid_to_scratch
	REG_S a4, SBI_SCRATCH_HARTID_TO_SCRATCH_OFFSET(tp)
	/* Store trap-exit function address in scratch space */
	lla a4, _trap_exit
	REG_S a4, SBI_SCRATCH_TRAP_EXIT_OFFSET(tp)
	/* Clear tmp0 in scratch space */
	REG_S zero, SBI_SCRATCH_TMP0_OFFSET(tp)
	/* Store firmware options in scratch space */
	MOV_3R s0, a0, s1, a1, s2, a2
#ifdef FW_OPTIONS
	li a0, FW_OPTIONS
#else
	call fw_options
#endif
	REG_S a0, SBI_SCRATCH_OPTIONS_OFFSET(tp)
	MOV_3R a0, s0, a1, s1, a2, s2
	/* Move to next scratch space */
	add t1, t1, t2
	blt t1, s7, _scratch_init

	/*
	 * Relocate Flattened Device Tree (FDT)
	 * source FDT address = previous arg1
	 * destination FDT address = next arg1
	 *
	 * Note: We will preserve a0 and a1 passed by
	 * previous booting stage.
	 */
	beqz a1, _fdt_reloc_done
	/* Mask values in a4 */
	li a4, 0xff
	/* t1 = destination FDT start address */
	MOV_3R s0, a0, s1, a1, s2, a2
	call fw_next_arg1
	add t1, a0, zero
	MOV_3R a0, s0, a1, s1, a2, s2
	beqz t1, _fdt_reloc_done
	beq t1, a1, _fdt_reloc_done
	/* t0 = source FDT start address */
	add t0, a1, zero
	/* t2 = source FDT size in big-endian */
#if __riscv_xlen == 64
	lwu t2, 4(t0)
#else
	lw t2, 4(t0)
#endif
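	/*
	 * The FDT header stores totalsize big-endian at byte offset 4,
	 * so the value loaded above is byte-swapped to little-endian
	 * below before being used as the copy length.
	 */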
	/* t3 = bit[15:8] of FDT size */
	add t3, t2, zero
	srli t3, t3, 16
	and t3, t3, a4
	slli t3, t3, 8
	/* t4 = bit[23:16] of FDT size */
	add t4, t2, zero
	srli t4, t4, 8
	and t4, t4, a4
	slli t4, t4, 16
	/* t5 = bit[31:24] of FDT size */
	add t5, t2, zero
	and t5, t5, a4
	slli t5, t5, 24
	/* t2 = bit[7:0] of FDT size */
	srli t2, t2, 24
	and t2, t2, a4
	/* t2 = FDT size in little-endian */
	or t2, t2, t3
	or t2, t2, t4
	or t2, t2, t5
	/* t2 = destination FDT end address */
	add t2, t1, t2
	/* FDT copy loop */
	ble t2, t1, _fdt_reloc_done
_fdt_reloc_again:
	REG_L t3, 0(t0)
	REG_S t3, 0(t1)
	add t0, t0, __SIZEOF_POINTER__
	add t1, t1, __SIZEOF_POINTER__
	blt t1, t2, _fdt_reloc_again
_fdt_reloc_done:

	/* mark boot hart done */
	li t0, BOOT_STATUS_BOOT_HART_DONE
	lla t1, _boot_status
	REG_S t0, 0(t1)
	fence rw, rw
	j _start_warm

	/* waiting for boot hart to be done (_boot_status == 2) */
_wait_for_boot_hart:
	li t0, BOOT_STATUS_BOOT_HART_DONE
	lla t1, _boot_status
	REG_L t1, 0(t1)
	/* Reduce the bus traffic so that boot hart may proceed faster */
	nop
	nop
	nop
	bne t0, t1, _wait_for_boot_hart

_start_warm:
	/* Reset all registers for non-boot HARTs */
	li ra, 0
	call _reset_regs

	/* Disable and clear all interrupts */
	csrw CSR_MIE, zero
	csrw CSR_MIP, zero

	/* Find HART count and HART stack size */
	lla a4, platform
#if __riscv_xlen == 64
	lwu s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
	lwu s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#else
	lw s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
	lw s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#endif
	REG_L s9, SBI_PLATFORM_HART_INDEX2ID_OFFSET(a4)
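	/*
	 * s9 -> hart_index2id table (32-bit HART ids indexed by HART
	 * index); when it is NULL, the HART id read below is used
	 * directly as the HART index.
	 */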
	/* Find HART id */
	csrr s6, CSR_MHARTID
	/* Find HART index */
	beqz s9, 3f
	li a4, 0
1:
#if __riscv_xlen == 64
	lwu a5, (s9)
#else
	lw a5, (s9)
#endif
	beq a5, s6, 2f
	add s9, s9, 4
	add a4, a4, 1
	blt a4, s7, 1b
	li a4, -1
2:	add s6, a4, zero
3:	bge s6, s7, _start_hang

	/* Find the scratch space based on HART index */
	lla tp, _fw_end
	mul a5, s7, s8
	add tp, tp, a5
	mul a5, s8, s6
	sub tp, tp, a5
	li a5, SBI_SCRATCH_SIZE
	sub tp, tp, a5

	/* update the mscratch */
	csrw CSR_MSCRATCH, tp

	/* Setup stack */
	add sp, tp, zero

	/* Setup trap handler */
	lla a4, _trap_handler
#if __riscv_xlen == 32
	csrr a5, CSR_MISA
	srli a5, a5, ('H' - 'A')
	andi a5, a5, 0x1
	beq a5, zero, _skip_trap_handler_rv32_hyp
	lla a4, _trap_handler_rv32_hyp
_skip_trap_handler_rv32_hyp:
#endif
	csrw CSR_MTVEC, a4

#if __riscv_xlen == 32
	/* Override trap exit for H-extension */
	csrr a5, CSR_MISA
	srli a5, a5, ('H' - 'A')
	andi a5, a5, 0x1
	beq a5, zero, _skip_trap_exit_rv32_hyp
	lla a4, _trap_exit_rv32_hyp
	csrr a5, CSR_MSCRATCH
	REG_S a4, SBI_SCRATCH_TRAP_EXIT_OFFSET(a5)
_skip_trap_exit_rv32_hyp:
#endif

	/* Initialize SBI runtime */
	csrr a0, CSR_MSCRATCH
	call sbi_init

	/* We don't expect to reach here hence just hang */
	j _start_hang

	.data
	.align 3
#ifdef FW_PIC
_runtime_offset:
	RISCV_PTR 0
#endif
_relocate_lottery:
	RISCV_PTR 0
_boot_status:
	RISCV_PTR 0
_load_start:
	RISCV_PTR _fw_start
_link_start:
	RISCV_PTR FW_TEXT_START
_link_end:
	RISCV_PTR _fw_reloc_end

	.section .entry, "ax", %progbits
	.align 3
	.globl _hartid_to_scratch
_hartid_to_scratch:
	/*
	 * a0 -> HART ID (passed by caller)
	 * a1 -> HART Index (passed by caller)
	 * t0 -> HART Stack Size
	 * t1 -> HART Stack End
	 * t2 -> Temporary
	 */
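	/*
	 * Mirrors the scratch layout set up during cold boot:
	 *   scratch = _fw_end + (HART count - HART index) * stack size
	 *             - SBI_SCRATCH_SIZE
	 */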
	lla t2, platform
#if __riscv_xlen == 64
	lwu t0, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(t2)
	lwu t2, SBI_PLATFORM_HART_COUNT_OFFSET(t2)
#else
	lw t0, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(t2)
	lw t2, SBI_PLATFORM_HART_COUNT_OFFSET(t2)
#endif
	sub t2, t2, a1
	mul t2, t2, t0
	lla t1, _fw_end
	add t1, t1, t2
	li t2, SBI_SCRATCH_SIZE
	sub a0, t1, t2
	ret

	.section .entry, "ax", %progbits
	.align 3
	.globl _start_hang
_start_hang:
	wfi
	j _start_hang

	.section .entry, "ax", %progbits
	.align 3
	.weak fw_platform_init
fw_platform_init:
	add a0, a1, zero
	ret

	/* Map implicit memcpy() added by compiler to sbi_memcpy() */
	.section .text
	.align 3
	.globl memcpy
memcpy:
	tail sbi_memcpy

	/* Map implicit memset() added by compiler to sbi_memset() */
	.section .text
	.align 3
	.globl memset
memset:
	tail sbi_memset

	/* Map implicit memmove() added by compiler to sbi_memmove() */
	.section .text
	.align 3
	.globl memmove
memmove:
	tail sbi_memmove

	/* Map implicit memcmp() added by compiler to sbi_memcmp() */
	.section .text
	.align 3
	.globl memcmp
memcmp:
	tail sbi_memcmp

	.macro TRAP_SAVE_AND_SETUP_SP_T0
	/* Swap TP and MSCRATCH */
	csrrw tp, CSR_MSCRATCH, tp
	/* Save T0 in scratch space */
	REG_S t0, SBI_SCRATCH_TMP0_OFFSET(tp)
	/*
	 * Set T0 to appropriate exception stack
	 *
	 * Came_From_M_Mode = ((MSTATUS.MPP < PRV_M) ? 1 : 0) - 1;
	 * Exception_Stack = TP ^ (Came_From_M_Mode & (SP ^ TP))
	 *
	 * Came_From_M_Mode = 0   ==> Exception_Stack = TP
	 * Came_From_M_Mode = -1  ==> Exception_Stack = SP
	 */
	csrr t0, CSR_MSTATUS
	srl t0, t0, MSTATUS_MPP_SHIFT
	and t0, t0, PRV_M
	slti t0, t0, PRV_M
	add t0, t0, -1
	xor sp, sp, tp
	and t0, t0, sp
	xor sp, sp, tp
	xor t0, tp, t0
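	/*
	 * The XOR/AND sequence above is a branchless select: SP is left
	 * unchanged and T0 = TP ^ (mask & (SP ^ TP)), i.e. T0 = SP when
	 * the mask is -1 and T0 = TP when the mask is 0.
	 */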
	/* Save original SP on exception stack */
	REG_S sp, (SBI_TRAP_REGS_OFFSET(sp) - SBI_TRAP_REGS_SIZE)(t0)
	/* Set SP to exception stack and make room for trap registers */
	add sp, t0, -(SBI_TRAP_REGS_SIZE)
	/* Restore T0 from scratch space */
	REG_L t0, SBI_SCRATCH_TMP0_OFFSET(tp)
	/* Save T0 on stack */
	REG_S t0, SBI_TRAP_REGS_OFFSET(t0)(sp)
	/* Swap TP and MSCRATCH */
	csrrw tp, CSR_MSCRATCH, tp
	.endm

	.macro TRAP_SAVE_MEPC_MSTATUS have_mstatush
	/* Save MEPC and MSTATUS CSRs */
	csrr t0, CSR_MEPC
	REG_S t0, SBI_TRAP_REGS_OFFSET(mepc)(sp)
	csrr t0, CSR_MSTATUS
	REG_S t0, SBI_TRAP_REGS_OFFSET(mstatus)(sp)
	.if \have_mstatush
	csrr t0, CSR_MSTATUSH
	REG_S t0, SBI_TRAP_REGS_OFFSET(mstatusH)(sp)
	.else
	REG_S zero, SBI_TRAP_REGS_OFFSET(mstatusH)(sp)
	.endif
	.endm

	.macro TRAP_SAVE_GENERAL_REGS_EXCEPT_SP_T0
	/* Save all general registers except SP and T0 */
	REG_S zero, SBI_TRAP_REGS_OFFSET(zero)(sp)
	REG_S ra, SBI_TRAP_REGS_OFFSET(ra)(sp)
	REG_S gp, SBI_TRAP_REGS_OFFSET(gp)(sp)
	REG_S tp, SBI_TRAP_REGS_OFFSET(tp)(sp)
	REG_S t1, SBI_TRAP_REGS_OFFSET(t1)(sp)
	REG_S t2, SBI_TRAP_REGS_OFFSET(t2)(sp)
	REG_S s0, SBI_TRAP_REGS_OFFSET(s0)(sp)
	REG_S s1, SBI_TRAP_REGS_OFFSET(s1)(sp)
	REG_S a0, SBI_TRAP_REGS_OFFSET(a0)(sp)
	REG_S a1, SBI_TRAP_REGS_OFFSET(a1)(sp)
	REG_S a2, SBI_TRAP_REGS_OFFSET(a2)(sp)
	REG_S a3, SBI_TRAP_REGS_OFFSET(a3)(sp)
	REG_S a4, SBI_TRAP_REGS_OFFSET(a4)(sp)
	REG_S a5, SBI_TRAP_REGS_OFFSET(a5)(sp)
	REG_S a6, SBI_TRAP_REGS_OFFSET(a6)(sp)
	REG_S a7, SBI_TRAP_REGS_OFFSET(a7)(sp)
	REG_S s2, SBI_TRAP_REGS_OFFSET(s2)(sp)
	REG_S s3, SBI_TRAP_REGS_OFFSET(s3)(sp)
	REG_S s4, SBI_TRAP_REGS_OFFSET(s4)(sp)
	REG_S s5, SBI_TRAP_REGS_OFFSET(s5)(sp)
	REG_S s6, SBI_TRAP_REGS_OFFSET(s6)(sp)
	REG_S s7, SBI_TRAP_REGS_OFFSET(s7)(sp)
	REG_S s8, SBI_TRAP_REGS_OFFSET(s8)(sp)
	REG_S s9, SBI_TRAP_REGS_OFFSET(s9)(sp)
	REG_S s10, SBI_TRAP_REGS_OFFSET(s10)(sp)
	REG_S s11, SBI_TRAP_REGS_OFFSET(s11)(sp)
	REG_S t3, SBI_TRAP_REGS_OFFSET(t3)(sp)
	REG_S t4, SBI_TRAP_REGS_OFFSET(t4)(sp)
	REG_S t5, SBI_TRAP_REGS_OFFSET(t5)(sp)
	REG_S t6, SBI_TRAP_REGS_OFFSET(t6)(sp)
	.endm

	.macro TRAP_CALL_C_ROUTINE
	/* Call C routine */
	add a0, sp, zero
	call sbi_trap_handler
	.endm

	.macro TRAP_RESTORE_GENERAL_REGS_EXCEPT_A0_T0
	/* Restore all general registers except A0 and T0 */
	REG_L ra, SBI_TRAP_REGS_OFFSET(ra)(a0)
	REG_L sp, SBI_TRAP_REGS_OFFSET(sp)(a0)
	REG_L gp, SBI_TRAP_REGS_OFFSET(gp)(a0)
	REG_L tp, SBI_TRAP_REGS_OFFSET(tp)(a0)
	REG_L t1, SBI_TRAP_REGS_OFFSET(t1)(a0)
	REG_L t2, SBI_TRAP_REGS_OFFSET(t2)(a0)
	REG_L s0, SBI_TRAP_REGS_OFFSET(s0)(a0)
	REG_L s1, SBI_TRAP_REGS_OFFSET(s1)(a0)
	REG_L a1, SBI_TRAP_REGS_OFFSET(a1)(a0)
	REG_L a2, SBI_TRAP_REGS_OFFSET(a2)(a0)
	REG_L a3, SBI_TRAP_REGS_OFFSET(a3)(a0)
	REG_L a4, SBI_TRAP_REGS_OFFSET(a4)(a0)
	REG_L a5, SBI_TRAP_REGS_OFFSET(a5)(a0)
	REG_L a6, SBI_TRAP_REGS_OFFSET(a6)(a0)
	REG_L a7, SBI_TRAP_REGS_OFFSET(a7)(a0)
	REG_L s2, SBI_TRAP_REGS_OFFSET(s2)(a0)
	REG_L s3, SBI_TRAP_REGS_OFFSET(s3)(a0)
	REG_L s4, SBI_TRAP_REGS_OFFSET(s4)(a0)
	REG_L s5, SBI_TRAP_REGS_OFFSET(s5)(a0)
	REG_L s6, SBI_TRAP_REGS_OFFSET(s6)(a0)
	REG_L s7, SBI_TRAP_REGS_OFFSET(s7)(a0)
	REG_L s8, SBI_TRAP_REGS_OFFSET(s8)(a0)
	REG_L s9, SBI_TRAP_REGS_OFFSET(s9)(a0)
	REG_L s10, SBI_TRAP_REGS_OFFSET(s10)(a0)
	REG_L s11, SBI_TRAP_REGS_OFFSET(s11)(a0)
	REG_L t3, SBI_TRAP_REGS_OFFSET(t3)(a0)
	REG_L t4, SBI_TRAP_REGS_OFFSET(t4)(a0)
	REG_L t5, SBI_TRAP_REGS_OFFSET(t5)(a0)
	REG_L t6, SBI_TRAP_REGS_OFFSET(t6)(a0)
	.endm
	.macro TRAP_RESTORE_MEPC_MSTATUS have_mstatush
	/* Restore MEPC and MSTATUS CSRs */
	REG_L t0, SBI_TRAP_REGS_OFFSET(mepc)(a0)
	csrw CSR_MEPC, t0
	REG_L t0, SBI_TRAP_REGS_OFFSET(mstatus)(a0)
	csrw CSR_MSTATUS, t0
	.if \have_mstatush
	REG_L t0, SBI_TRAP_REGS_OFFSET(mstatusH)(a0)
	csrw CSR_MSTATUSH, t0
	.endif
	.endm

	.macro TRAP_RESTORE_A0_T0
	/* Restore T0 */
	REG_L t0, SBI_TRAP_REGS_OFFSET(t0)(a0)
	/* Restore A0 */
	REG_L a0, SBI_TRAP_REGS_OFFSET(a0)(a0)
	.endm

	.section .entry, "ax", %progbits
	.align 3
	.globl _trap_handler
	.globl _trap_exit
_trap_handler:
	TRAP_SAVE_AND_SETUP_SP_T0
	TRAP_SAVE_MEPC_MSTATUS 0
	TRAP_SAVE_GENERAL_REGS_EXCEPT_SP_T0
	TRAP_CALL_C_ROUTINE
_trap_exit:
	TRAP_RESTORE_GENERAL_REGS_EXCEPT_A0_T0
	TRAP_RESTORE_MEPC_MSTATUS 0
	TRAP_RESTORE_A0_T0
	mret

#if __riscv_xlen == 32
	.section .entry, "ax", %progbits
	.align 3
	.globl _trap_handler_rv32_hyp
	.globl _trap_exit_rv32_hyp
_trap_handler_rv32_hyp:
	TRAP_SAVE_AND_SETUP_SP_T0
	TRAP_SAVE_MEPC_MSTATUS 1
	TRAP_SAVE_GENERAL_REGS_EXCEPT_SP_T0
	TRAP_CALL_C_ROUTINE
_trap_exit_rv32_hyp:
	TRAP_RESTORE_GENERAL_REGS_EXCEPT_A0_T0
	TRAP_RESTORE_MEPC_MSTATUS 1
	TRAP_RESTORE_A0_T0
	mret
#endif

	.section .entry, "ax", %progbits
	.align 3
	.globl _reset_regs
_reset_regs:
	/* flush the instruction cache */
	fence.i
	/* Reset all registers except ra, a0, a1 and a2 */
	li sp, 0
	li gp, 0
	li tp, 0
	li t0, 0
	li t1, 0
	li t2, 0
	li s0, 0
	li s1, 0
	li a3, 0
	li a4, 0
	li a5, 0
	li a6, 0
	li a7, 0
	li s2, 0
	li s3, 0
	li s4, 0
	li s5, 0
	li s6, 0
	li s7, 0
	li s8, 0
	li s9, 0
	li s10, 0
	li s11, 0
	li t3, 0
	li t4, 0
	li t5, 0
	li t6, 0
	csrw CSR_MSCRATCH, 0
	ret

#ifdef FW_FDT_PATH
	.section .rodata
	.align 4
	.globl fw_fdt_bin
fw_fdt_bin:
	.incbin FW_FDT_PATH
#ifdef FW_FDT_PADDING
	.fill FW_FDT_PADDING, 1, 0
#endif
#endif