fw_base.S

/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#include <sbi/riscv_asm.h>
#include <sbi/riscv_encoding.h>
#include <sbi/sbi_platform.h>
#include <sbi/sbi_scratch.h>
#include <sbi/sbi_trap.h>

#define BOOT_STATUS_RELOCATE_DONE 1
#define BOOT_STATUS_BOOT_HART_DONE 2
.macro MOV_3R __d0, __s0, __d1, __s1, __d2, __s2
        add \__d0, \__s0, zero
        add \__d1, \__s1, zero
        add \__d2, \__s2, zero
.endm

.macro MOV_5R __d0, __s0, __d1, __s1, __d2, __s2, __d3, __s3, __d4, __s4
        add \__d0, \__s0, zero
        add \__d1, \__s1, zero
        add \__d2, \__s2, zero
        add \__d3, \__s3, zero
        add \__d4, \__s4, zero
.endm
/*
 * If __start_reg <= __check_reg and __check_reg < __end_reg then
 * jump to __jump_label
 */
.macro BRANGE __start_reg, __end_reg, __check_reg, __jump_label
        blt \__check_reg, \__start_reg, 999f
        bge \__check_reg, \__end_reg, 999f
        j \__jump_label
999:
.endm
.section .entry, "ax", %progbits
.align 3
.globl _start
.globl _start_warm
_start:
        /* Find preferred boot HART id */
        MOV_3R s0, a0, s1, a1, s2, a2
        call fw_boot_hart
        add a6, a0, zero
        MOV_3R a0, s0, a1, s1, a2, s2
        li a7, -1
        beq a6, a7, _try_lottery
        /* Jump to relocation wait loop if we are not boot hart */
        bne a0, a6, _wait_relocate_copy_done
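        /*
         * Relocation lottery: when there is no preferred boot hart, every
         * hart reaching this point atomically increments _relocate_lottery;
         * only the hart that observes the old value 0 wins and performs the
         * relocation, all other harts wait for the copy to finish.
         */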
_try_lottery:
        /* Jump to relocation wait loop if we don't get relocation lottery */
        la a6, _relocate_lottery
        li a7, 1
        amoadd.w a6, a7, (a6)
        bnez a6, _wait_relocate_copy_done
        /* Save load address */
        la t0, _load_start
        la t1, _start
        REG_S t1, 0(t0)
/* Relocate if load address != link address */
_relocate:
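        /*
         * Register usage for the relocation copy:
         *   t0 = link-address start, t1 = link-address end
         *   t2 = load-address start, t3 = load-address end
         *   t4 = link address of _relocate_done (jump target after copy)
         */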
        la t0, _link_start
        REG_L t0, 0(t0)
        la t1, _link_end
        REG_L t1, 0(t1)
        la t2, _load_start
        REG_L t2, 0(t2)
        sub t3, t1, t0
        add t3, t3, t2
        beq t0, t2, _relocate_done
        la t4, _relocate_done
        sub t4, t4, t2
        add t4, t4, t0
        blt t2, t0, _relocate_copy_to_upper
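        /*
         * If the load and link ranges overlap, copying would clobber the
         * relocation code (_relocate .. _relocate_done) or the
         * _relocate_lottery/_boot_status words while they are still in use;
         * in that case there is no safe way to relocate, so hang.
         */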
_relocate_copy_to_lower:
        ble t1, t2, _relocate_copy_to_lower_loop
        la t3, _relocate_lottery
        BRANGE t2, t1, t3, _start_hang
        la t3, _boot_status
        BRANGE t2, t1, t3, _start_hang
        la t3, _relocate
        la t5, _relocate_done
        BRANGE t2, t1, t3, _start_hang
        BRANGE t2, t1, t5, _start_hang
        BRANGE t3, t5, t2, _start_hang
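        /*
         * Copy forwards (lowest word first) so that an overlapping, lower
         * destination never overwrites source words not yet copied.
         */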
_relocate_copy_to_lower_loop:
        REG_L t3, 0(t2)
        REG_S t3, 0(t0)
        add t0, t0, __SIZEOF_POINTER__
        add t2, t2, __SIZEOF_POINTER__
        blt t0, t1, _relocate_copy_to_lower_loop
        jr t4
_relocate_copy_to_upper:
        ble t3, t0, _relocate_copy_to_upper_loop
        la t2, _relocate_lottery
        BRANGE t0, t3, t2, _start_hang
        la t2, _boot_status
        BRANGE t0, t3, t2, _start_hang
        la t2, _relocate
        la t5, _relocate_done
        BRANGE t0, t3, t2, _start_hang
        BRANGE t0, t3, t5, _start_hang
        BRANGE t2, t5, t0, _start_hang
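        /*
         * Copy backwards (highest word first) so that an overlapping, higher
         * destination never overwrites source words not yet copied.
         */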
_relocate_copy_to_upper_loop:
        add t3, t3, -__SIZEOF_POINTER__
        add t1, t1, -__SIZEOF_POINTER__
        REG_L t2, 0(t3)
        REG_S t2, 0(t1)
        blt t0, t1, _relocate_copy_to_upper_loop
        jr t4
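        /*
         * Non-boot harts: if the firmware is being relocated, poll the
         * load-address copy of _boot_status until the winning hart marks
         * the copy done, then jump to the link-address copy of
         * _wait_for_boot_hart.
         */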
_wait_relocate_copy_done:
        la t0, _start
        la t1, _link_start
        REG_L t1, 0(t1)
        beq t0, t1, _wait_for_boot_hart
        la t2, _boot_status
        la t3, _wait_for_boot_hart
        sub t3, t3, t0
        add t3, t3, t1
1:
        /* Waiting for relocate copy done (_boot_status == 1) */
        li t4, BOOT_STATUS_RELOCATE_DONE
        REG_L t5, 0(t2)
        /* Reduce the bus traffic so that boot hart may proceed faster */
        nop
        nop
        nop
        bgt t4, t5, 1b
        jr t3
_relocate_done:
        /*
         * Mark relocate copy done
         * Use _boot_status copy relative to the load address
         */
        la t0, _boot_status
        la t1, _link_start
        REG_L t1, 0(t1)
        la t2, _load_start
        REG_L t2, 0(t2)
        sub t0, t0, t1
        add t0, t0, t2
        li t1, BOOT_STATUS_RELOCATE_DONE
        REG_S t1, 0(t0)
        fence rw, rw
        /* At this point we are running from link address */

        /* Reset all registers for boot HART */
        li ra, 0
        call _reset_regs

        /* Allow main firmware to save info */
        MOV_5R s0, a0, s1, a1, s2, a2, s3, a3, s4, a4
        call fw_save_info
        MOV_5R a0, s0, a1, s1, a2, s2, a3, s3, a4, s4

        /*
         * Preload HART details
         * s7 -> HART Count
         * s8 -> HART Stack Size
         */
        la a4, platform
#if __riscv_xlen == 64
        lwu s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
        lwu s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#else
        lw s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
        lw s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#endif
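        /*
         * Memory layout set up below: per-hart stacks of HART Stack Size
         * bytes are placed back-to-back above _fw_end, and each hart's
         * scratch space (SBI_SCRATCH_SIZE bytes) occupies the top of its
         * own stack.
         */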
        /* Setup scratch space for all the HARTs */
        la tp, _fw_end
        mul a5, s7, s8
        add tp, tp, a5
        /* Keep a copy of tp */
        add t3, tp, zero
        /* Counter */
        li t2, 1
        /* hartid 0 is mandated by ISA */
        li t1, 0
_scratch_init:
        add tp, t3, zero
        mul a5, s8, t1
        sub tp, tp, a5
        li a5, SBI_SCRATCH_SIZE
        sub tp, tp, a5
        /* Initialize scratch space */
        /* Store fw_start and fw_size in scratch space */
        la a4, _fw_start
        la a5, _fw_end
        mul t0, s7, s8
        add a5, a5, t0
        sub a5, a5, a4
        REG_S a4, SBI_SCRATCH_FW_START_OFFSET(tp)
        REG_S a5, SBI_SCRATCH_FW_SIZE_OFFSET(tp)
        /* Store next arg1 in scratch space */
        MOV_3R s0, a0, s1, a1, s2, a2
        call fw_next_arg1
        REG_S a0, SBI_SCRATCH_NEXT_ARG1_OFFSET(tp)
        MOV_3R a0, s0, a1, s1, a2, s2
        /* Store next address in scratch space */
        MOV_3R s0, a0, s1, a1, s2, a2
        call fw_next_addr
        REG_S a0, SBI_SCRATCH_NEXT_ADDR_OFFSET(tp)
        MOV_3R a0, s0, a1, s1, a2, s2
        /* Store next mode in scratch space */
        MOV_3R s0, a0, s1, a1, s2, a2
        call fw_next_mode
        REG_S a0, SBI_SCRATCH_NEXT_MODE_OFFSET(tp)
        MOV_3R a0, s0, a1, s1, a2, s2
        /* Store warm_boot address in scratch space */
        la a4, _start_warm
        REG_S a4, SBI_SCRATCH_WARMBOOT_ADDR_OFFSET(tp)
        /* Store platform address in scratch space */
        la a4, platform
        REG_S a4, SBI_SCRATCH_PLATFORM_ADDR_OFFSET(tp)
        /* Store hartid-to-scratch function address in scratch space */
        la a4, _hartid_to_scratch
        REG_S a4, SBI_SCRATCH_HARTID_TO_SCRATCH_OFFSET(tp)
        /* Clear tmp0 in scratch space */
        REG_S zero, SBI_SCRATCH_TMP0_OFFSET(tp)
        /* Store firmware options in scratch space */
        MOV_3R s0, a0, s1, a1, s2, a2
#ifdef FW_OPTIONS
        li a4, FW_OPTIONS
#else
        add a4, zero, zero
#endif
        call fw_options
        or a4, a4, a0
        REG_S a4, SBI_SCRATCH_OPTIONS_OFFSET(tp)
        MOV_3R a0, s0, a1, s1, a2, s2
        /* Move to next scratch space */
        add t1, t1, t2
        blt t1, s7, _scratch_init
        /* Zero-out BSS */
        la a4, _bss_start
        la a5, _bss_end
_bss_zero:
        REG_S zero, (a4)
        add a4, a4, __SIZEOF_POINTER__
        blt a4, a5, _bss_zero
        /* Override previous arg1 */
        MOV_3R s0, a0, s1, a1, s2, a2
        call fw_prev_arg1
        add t1, a0, zero
        MOV_3R a0, s0, a1, s1, a2, s2
        beqz t1, _prev_arg1_override_done
        add a1, t1, zero
_prev_arg1_override_done:
        /*
         * Relocate Flattened Device Tree (FDT)
         * source FDT address = previous arg1
         * destination FDT address = next arg1
         *
         * Note: We will preserve a0 and a1 passed by
         * previous booting stage.
         */
        beqz a1, _fdt_reloc_done
        /* Mask values in a3 and a4 */
        li a3, ~(__SIZEOF_POINTER__ - 1)
        li a4, 0xff
        /* t1 = destination FDT start address */
        MOV_3R s0, a0, s1, a1, s2, a2
        call fw_next_arg1
        add t1, a0, zero
        MOV_3R a0, s0, a1, s1, a2, s2
        beqz t1, _fdt_reloc_done
        beq t1, a1, _fdt_reloc_done
        and t1, t1, a3
        /* t0 = source FDT start address */
        add t0, a1, zero
        and t0, t0, a3
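        /*
         * The FDT header stores its total size as a 32-bit big-endian value
         * at byte offset 4; the block below byte-swaps it so that the copy
         * length is available in native little-endian order.
         */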
        /* t2 = source FDT size in big-endian */
#if __riscv_xlen == 64
        lwu t2, 4(t0)
#else
        lw t2, 4(t0)
#endif
        /* t3 = bit[15:8] of FDT size */
        add t3, t2, zero
        srli t3, t3, 16
        and t3, t3, a4
        slli t3, t3, 8
        /* t4 = bit[23:16] of FDT size */
        add t4, t2, zero
        srli t4, t4, 8
        and t4, t4, a4
        slli t4, t4, 16
        /* t5 = bit[31:24] of FDT size */
        add t5, t2, zero
        and t5, t5, a4
        slli t5, t5, 24
        /* t2 = bit[7:0] of FDT size */
        srli t2, t2, 24
        and t2, t2, a4
        /* t2 = FDT size in little-endian */
        or t2, t2, t3
        or t2, t2, t4
        or t2, t2, t5
        /* t2 = destination FDT end address */
        add t2, t1, t2
        /* FDT copy loop */
        ble t2, t1, _fdt_reloc_done
_fdt_reloc_again:
        REG_L t3, 0(t0)
        REG_S t3, 0(t1)
        add t0, t0, __SIZEOF_POINTER__
        add t1, t1, __SIZEOF_POINTER__
        blt t1, t2, _fdt_reloc_again
_fdt_reloc_done:
        /* Mark boot hart done */
        li t0, BOOT_STATUS_BOOT_HART_DONE
        la t1, _boot_status
        REG_S t0, 0(t1)
        fence rw, rw
        j _start_warm
        /* Waiting for boot hart done (_boot_status == 2) */
_wait_for_boot_hart:
        li t0, BOOT_STATUS_BOOT_HART_DONE
        la t1, _boot_status
        REG_L t1, 0(t1)
        /* Reduce the bus traffic so that boot hart may proceed faster */
        nop
        nop
        nop
        bne t0, t1, _wait_for_boot_hart
_start_warm:
        /* Reset all registers for non-boot HARTs */
        li ra, 0
        call _reset_regs

        /* Disable and clear all interrupts */
        csrw CSR_MIE, zero
        csrw CSR_MIP, zero

        la a4, platform
#if __riscv_xlen == 64
        lwu s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
        lwu s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#else
        lw s7, SBI_PLATFORM_HART_COUNT_OFFSET(a4)
        lw s8, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(a4)
#endif

        /* HART ID should be within expected limit */
        csrr s6, CSR_MHARTID
        bge s6, s7, _start_hang
        /* Find the scratch space for this hart */
        la tp, _fw_end
        mul a5, s7, s8
        add tp, tp, a5
        mul a5, s8, s6
        sub tp, tp, a5
        li a5, SBI_SCRATCH_SIZE
        sub tp, tp, a5

        /* Update the mscratch */
        csrw CSR_MSCRATCH, tp

        /* Setup stack */
        add sp, tp, zero

        /* Setup trap handler */
        la a4, _trap_handler
        csrw CSR_MTVEC, a4
        /* Make sure that mtvec is updated */
1:      csrr a5, CSR_MTVEC
        bne a4, a5, 1b

        /* Initialize SBI runtime */
        csrr a0, CSR_MSCRATCH
        call sbi_init

        /* We don't expect to reach here hence just hang */
        j _start_hang
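/*
 * Boot-time data words: the relocation lottery counter, the boot status
 * word, and the load/link address bookkeeping used by the relocation and
 * wait loops above.
 */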
.align 3
_relocate_lottery:
        RISCV_PTR 0
_boot_status:
        RISCV_PTR 0
_load_start:
        RISCV_PTR _fw_start
_link_start:
        RISCV_PTR _fw_start
_link_end:
        RISCV_PTR _fw_reloc_end
.section .entry, "ax", %progbits
.align 3
.globl _hartid_to_scratch
_hartid_to_scratch:
        add sp, sp, -(3 * __SIZEOF_POINTER__)
        REG_S s0, (sp)
        REG_S s1, (__SIZEOF_POINTER__)(sp)
        REG_S s2, (__SIZEOF_POINTER__ * 2)(sp)
        /*
         * a0 -> HART ID (passed by caller)
         * s0 -> HART Stack Size
         * s1 -> HART Stack End
         * s2 -> Temporary
         */
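        /*
         * scratch(hartid) = _fw_end + (HART Count * HART Stack Size)
         *                   - (hartid * HART Stack Size) - SBI_SCRATCH_SIZE
         */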
        la s2, platform
#if __riscv_xlen == 64
        lwu s0, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(s2)
        lwu s2, SBI_PLATFORM_HART_COUNT_OFFSET(s2)
#else
        lw s0, SBI_PLATFORM_HART_STACK_SIZE_OFFSET(s2)
        lw s2, SBI_PLATFORM_HART_COUNT_OFFSET(s2)
#endif
        mul s2, s2, s0
        la s1, _fw_end
        add s1, s1, s2
        mul s2, s0, a0
        sub s1, s1, s2
        li s2, SBI_SCRATCH_SIZE
        sub a0, s1, s2
        REG_L s0, (sp)
        REG_L s1, (__SIZEOF_POINTER__)(sp)
        REG_L s2, (__SIZEOF_POINTER__ * 2)(sp)
        add sp, sp, (3 * __SIZEOF_POINTER__)
        ret
.section .entry, "ax", %progbits
.align 3
.globl _start_hang
_start_hang:
        wfi
        j _start_hang
.section .entry, "ax", %progbits
.align 3
.globl _trap_handler
_trap_handler:
        /* Swap TP and MSCRATCH */
        csrrw tp, CSR_MSCRATCH, tp

        /* Save T0 in scratch space */
        REG_S t0, SBI_SCRATCH_TMP0_OFFSET(tp)

        /* Check which mode we came from */
        csrr t0, CSR_MSTATUS
        srl t0, t0, MSTATUS_MPP_SHIFT
        and t0, t0, PRV_M
        xori t0, t0, PRV_M
        beq t0, zero, _trap_handler_m_mode
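        /*
         * For traps from S-mode or U-mode the interrupted SP cannot be
         * trusted, so the trap frame is carved out just below this hart's
         * scratch space (TP currently holds the MSCRATCH value). For traps
         * from M-mode we are already on OpenSBI's own stack, so it is reused.
         */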
        /* We came from S-mode or U-mode */
_trap_handler_s_mode:
        /* Set T0 to original SP */
        add t0, sp, zero

        /* Setup exception stack */
        add sp, tp, -(SBI_TRAP_REGS_SIZE)

        /* Jump to code common for all modes */
        j _trap_handler_all_mode

        /* We came from M-mode */
_trap_handler_m_mode:
        /* Set T0 to original SP */
        add t0, sp, zero

        /* Re-use current SP as exception stack */
        add sp, sp, -(SBI_TRAP_REGS_SIZE)

_trap_handler_all_mode:
        /* Save original SP (from T0) on stack */
        REG_S t0, SBI_TRAP_REGS_OFFSET(sp)(sp)

        /* Restore T0 from scratch space */
        REG_L t0, SBI_SCRATCH_TMP0_OFFSET(tp)

        /* Save T0 on stack */
        REG_S t0, SBI_TRAP_REGS_OFFSET(t0)(sp)

        /* Swap TP and MSCRATCH */
        csrrw tp, CSR_MSCRATCH, tp

        /* Save MEPC and MSTATUS CSRs */
        csrr t0, CSR_MEPC
        REG_S t0, SBI_TRAP_REGS_OFFSET(mepc)(sp)
        csrr t0, CSR_MSTATUS
        REG_S t0, SBI_TRAP_REGS_OFFSET(mstatus)(sp)
        REG_S zero, SBI_TRAP_REGS_OFFSET(mstatusH)(sp)
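        /*
         * On RV32, additionally save mstatush, but only when the hypervisor
         * ('H') extension is advertised in misa; otherwise the mstatusH slot
         * keeps the zero stored above.
         */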
#if __riscv_xlen == 32
        csrr t0, CSR_MISA
        srli t0, t0, ('H' - 'A')
        andi t0, t0, 0x1
        beq t0, zero, _skip_mstatush_save
        csrr t0, CSR_MSTATUSH
        REG_S t0, SBI_TRAP_REGS_OFFSET(mstatusH)(sp)
_skip_mstatush_save:
#endif
        /* Save all general registers except SP and T0 */
        REG_S zero, SBI_TRAP_REGS_OFFSET(zero)(sp)
        REG_S ra, SBI_TRAP_REGS_OFFSET(ra)(sp)
        REG_S gp, SBI_TRAP_REGS_OFFSET(gp)(sp)
        REG_S tp, SBI_TRAP_REGS_OFFSET(tp)(sp)
        REG_S t1, SBI_TRAP_REGS_OFFSET(t1)(sp)
        REG_S t2, SBI_TRAP_REGS_OFFSET(t2)(sp)
        REG_S s0, SBI_TRAP_REGS_OFFSET(s0)(sp)
        REG_S s1, SBI_TRAP_REGS_OFFSET(s1)(sp)
        REG_S a0, SBI_TRAP_REGS_OFFSET(a0)(sp)
        REG_S a1, SBI_TRAP_REGS_OFFSET(a1)(sp)
        REG_S a2, SBI_TRAP_REGS_OFFSET(a2)(sp)
        REG_S a3, SBI_TRAP_REGS_OFFSET(a3)(sp)
        REG_S a4, SBI_TRAP_REGS_OFFSET(a4)(sp)
        REG_S a5, SBI_TRAP_REGS_OFFSET(a5)(sp)
        REG_S a6, SBI_TRAP_REGS_OFFSET(a6)(sp)
        REG_S a7, SBI_TRAP_REGS_OFFSET(a7)(sp)
        REG_S s2, SBI_TRAP_REGS_OFFSET(s2)(sp)
        REG_S s3, SBI_TRAP_REGS_OFFSET(s3)(sp)
        REG_S s4, SBI_TRAP_REGS_OFFSET(s4)(sp)
        REG_S s5, SBI_TRAP_REGS_OFFSET(s5)(sp)
        REG_S s6, SBI_TRAP_REGS_OFFSET(s6)(sp)
        REG_S s7, SBI_TRAP_REGS_OFFSET(s7)(sp)
        REG_S s8, SBI_TRAP_REGS_OFFSET(s8)(sp)
        REG_S s9, SBI_TRAP_REGS_OFFSET(s9)(sp)
        REG_S s10, SBI_TRAP_REGS_OFFSET(s10)(sp)
        REG_S s11, SBI_TRAP_REGS_OFFSET(s11)(sp)
        REG_S t3, SBI_TRAP_REGS_OFFSET(t3)(sp)
        REG_S t4, SBI_TRAP_REGS_OFFSET(t4)(sp)
        REG_S t5, SBI_TRAP_REGS_OFFSET(t5)(sp)
        REG_S t6, SBI_TRAP_REGS_OFFSET(t6)(sp)

        /* Call C routine */
        add a0, sp, zero
        csrr a1, CSR_MSCRATCH
        call sbi_trap_handler
        /* Restore all general registers except SP and T0 */
        REG_L ra, SBI_TRAP_REGS_OFFSET(ra)(sp)
        REG_L gp, SBI_TRAP_REGS_OFFSET(gp)(sp)
        REG_L tp, SBI_TRAP_REGS_OFFSET(tp)(sp)
        REG_L t1, SBI_TRAP_REGS_OFFSET(t1)(sp)
        REG_L t2, SBI_TRAP_REGS_OFFSET(t2)(sp)
        REG_L s0, SBI_TRAP_REGS_OFFSET(s0)(sp)
        REG_L s1, SBI_TRAP_REGS_OFFSET(s1)(sp)
        REG_L a0, SBI_TRAP_REGS_OFFSET(a0)(sp)
        REG_L a1, SBI_TRAP_REGS_OFFSET(a1)(sp)
        REG_L a2, SBI_TRAP_REGS_OFFSET(a2)(sp)
        REG_L a3, SBI_TRAP_REGS_OFFSET(a3)(sp)
        REG_L a4, SBI_TRAP_REGS_OFFSET(a4)(sp)
        REG_L a5, SBI_TRAP_REGS_OFFSET(a5)(sp)
        REG_L a6, SBI_TRAP_REGS_OFFSET(a6)(sp)
        REG_L a7, SBI_TRAP_REGS_OFFSET(a7)(sp)
        REG_L s2, SBI_TRAP_REGS_OFFSET(s2)(sp)
        REG_L s3, SBI_TRAP_REGS_OFFSET(s3)(sp)
        REG_L s4, SBI_TRAP_REGS_OFFSET(s4)(sp)
        REG_L s5, SBI_TRAP_REGS_OFFSET(s5)(sp)
        REG_L s6, SBI_TRAP_REGS_OFFSET(s6)(sp)
        REG_L s7, SBI_TRAP_REGS_OFFSET(s7)(sp)
        REG_L s8, SBI_TRAP_REGS_OFFSET(s8)(sp)
        REG_L s9, SBI_TRAP_REGS_OFFSET(s9)(sp)
        REG_L s10, SBI_TRAP_REGS_OFFSET(s10)(sp)
        REG_L s11, SBI_TRAP_REGS_OFFSET(s11)(sp)
        REG_L t3, SBI_TRAP_REGS_OFFSET(t3)(sp)
        REG_L t4, SBI_TRAP_REGS_OFFSET(t4)(sp)
        REG_L t5, SBI_TRAP_REGS_OFFSET(t5)(sp)
        REG_L t6, SBI_TRAP_REGS_OFFSET(t6)(sp)

        /* Restore MEPC and MSTATUS CSRs */
        REG_L t0, SBI_TRAP_REGS_OFFSET(mepc)(sp)
        csrw CSR_MEPC, t0
        REG_L t0, SBI_TRAP_REGS_OFFSET(mstatus)(sp)
        csrw CSR_MSTATUS, t0
#if __riscv_xlen == 32
        csrr t0, CSR_MISA
        srli t0, t0, ('H' - 'A')
        andi t0, t0, 0x1
        beq t0, zero, _skip_mstatush_restore
        REG_L t0, SBI_TRAP_REGS_OFFSET(mstatusH)(sp)
        csrw CSR_MSTATUSH, t0
_skip_mstatush_restore:
#endif

        /* Restore T0 */
        REG_L t0, SBI_TRAP_REGS_OFFSET(t0)(sp)

        /* Restore SP */
        REG_L sp, SBI_TRAP_REGS_OFFSET(sp)(sp)

        mret
.section .entry, "ax", %progbits
.align 3
.globl _reset_regs
_reset_regs:
        /* Flush the instruction cache */
        fence.i

        /* Reset all registers except ra, a0, a1 and a2 */
        li sp, 0
        li gp, 0
        li tp, 0
        li t0, 0
        li t1, 0
        li t2, 0
        li s0, 0
        li s1, 0
        li a3, 0
        li a4, 0
        li a5, 0
        li a6, 0
        li a7, 0
        li s2, 0
        li s3, 0
        li s4, 0
        li s5, 0
        li s6, 0
        li s7, 0
        li s8, 0
        li s9, 0
        li s10, 0
        li s11, 0
        li t3, 0
        li t4, 0
        li t5, 0
        li t6, 0
        csrw CSR_MSCRATCH, 0

        ret