/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#include <sbi/riscv_encoding.h>
#include <sbi/sbi_bitops.h>
#include <sbi/sbi_hart.h>
#include <sbi/sbi_scratch.h>
#include <sbi/sbi_trap.h>
#include <sbi/sbi_unpriv.h>

/**
 * a3 must be a pointer to the sbi_trap_info and a4 is used as a temporary
 * register in the trap handler. Make sure that the compiler doesn't use
 * a3 & a4.
 *
 * Each helper below performs a single load/store with MSTATUS.MPRV set,
 * so the access goes through S/U-mode address translation and protection
 * checks, while MTVEC temporarily points at the expected trap handler.
 * If the access faults, that handler fills the sbi_trap_info pointed to
 * by a3 and resumes after the access; ".option norvc" keeps the access
 * instruction 4 bytes long so the handler can skip it by a fixed offset.
 */
#define DEFINE_UNPRIVILEGED_LOAD_FUNCTION(type, insn) \
	type sbi_load_##type(const type *addr, \
			     struct sbi_trap_info *trap) \
	{ \
		register ulong tinfo asm("a3"); \
		register ulong mstatus = 0; \
		register ulong mtvec = sbi_hart_expected_trap_addr(); \
		type ret = 0; \
		trap->cause = 0; \
		asm volatile( \
			"add %[tinfo], %[taddr], zero\n" \
			"csrrw %[mtvec], " STR(CSR_MTVEC) ", %[mtvec]\n" \
			"csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n" \
			".option push\n" \
			".option norvc\n" \
			#insn " %[ret], %[addr]\n" \
			".option pop\n" \
			"csrw " STR(CSR_MSTATUS) ", %[mstatus]\n" \
			"csrw " STR(CSR_MTVEC) ", %[mtvec]" \
			: [mstatus] "+&r"(mstatus), [mtvec] "+&r"(mtvec), \
			  [tinfo] "+&r"(tinfo), [ret] "=&r"(ret) \
			: [addr] "m"(*addr), [mprv] "r"(MSTATUS_MPRV), \
			  [taddr] "r"((ulong)trap) \
			: "a4", "memory"); \
		return ret; \
	}

#define DEFINE_UNPRIVILEGED_STORE_FUNCTION(type, insn) \
	void sbi_store_##type(type *addr, type val, \
			      struct sbi_trap_info *trap) \
	{ \
		register ulong tinfo asm("a3") = (ulong)trap; \
		register ulong mstatus = 0; \
		register ulong mtvec = sbi_hart_expected_trap_addr(); \
		trap->cause = 0; \
		asm volatile( \
			"add %[tinfo], %[taddr], zero\n" \
			"csrrw %[mtvec], " STR(CSR_MTVEC) ", %[mtvec]\n" \
			"csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n" \
			".option push\n" \
			".option norvc\n" \
			#insn " %[val], %[addr]\n" \
			".option pop\n" \
			"csrw " STR(CSR_MSTATUS) ", %[mstatus]\n" \
			"csrw " STR(CSR_MTVEC) ", %[mtvec]" \
			: [mstatus] "+&r"(mstatus), [mtvec] "+&r"(mtvec), \
			  [tinfo] "+&r"(tinfo) \
			: [addr] "m"(*addr), [mprv] "r"(MSTATUS_MPRV), \
			  [val] "r"(val), [taddr] "r"((ulong)trap) \
			: "a4", "memory"); \
	}

DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
DEFINE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
DEFINE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
DEFINE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)

#if __riscv_xlen == 64
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
DEFINE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
#else
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
DEFINE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)

u64 sbi_load_u64(const u64 *addr,
		 struct sbi_trap_info *trap)
{
	u64 ret = sbi_load_u32((u32 *)addr, trap);

	if (trap->cause)
		return 0;

	ret |= ((u64)sbi_load_u32((u32 *)addr + 1, trap) << 32);
	if (trap->cause)
		return 0;

	return ret;
}

void sbi_store_u64(u64 *addr, u64 val,
		   struct sbi_trap_info *trap)
{
	sbi_store_u32((u32 *)addr, val, trap);
	if (trap->cause)
		return;

	sbi_store_u32((u32 *)addr + 1, val >> 32, trap);
	if (trap->cause)
		return;
}
#endif
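
/*
 * Illustrative usage sketch (hypothetical, not part of the original file):
 * a faulting unprivileged access does not trap the caller; the helper
 * returns 0 and fills @trap instead, so trap->cause must be checked after
 * every access before the loaded value is trusted.
 */
static inline int example_read_guest_u32(const u32 *gaddr, u32 *out)
{
	struct sbi_trap_info trap;
	u32 val = sbi_load_u32(gaddr, &trap);

	if (trap.cause)
		return -1; /* typically redirected back to the guest */

	*out = val;
	return 0;
}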

ulong sbi_get_insn(ulong mepc, struct sbi_trap_info *trap)
{
	register ulong tinfo asm("a3");
	register ulong ttmp asm("a4");
	register ulong mstatus = 0;
	register ulong mtvec = sbi_hart_expected_trap_addr();
	ulong insn = 0;

	trap->cause = 0;

	asm volatile(
		"add %[tinfo], %[taddr], zero\n"
		"csrrw %[mtvec], " STR(CSR_MTVEC) ", %[mtvec]\n"
		"csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n"
		/*
		 * Read the low halfword first; if its two LSBs are 11 the
		 * instruction is 32-bit, so fetch and merge the upper
		 * halfword as well.
		 */
		"lhu %[insn], (%[addr])\n"
		"andi %[ttmp], %[insn], 3\n"
		"addi %[ttmp], %[ttmp], -3\n"
		"bne %[ttmp], zero, 2f\n"
		"lhu %[ttmp], 2(%[addr])\n"
		"sll %[ttmp], %[ttmp], 16\n"
		"add %[insn], %[insn], %[ttmp]\n"
		"2: csrw " STR(CSR_MSTATUS) ", %[mstatus]\n"
		"csrw " STR(CSR_MTVEC) ", %[mtvec]"
		: [mstatus] "+&r"(mstatus), [mtvec] "+&r"(mtvec),
		  [tinfo] "+&r"(tinfo), [ttmp] "+&r"(ttmp),
		  [insn] "=&r"(insn)
		: [mprv] "r"(MSTATUS_MPRV | MSTATUS_MXR),
		  [taddr] "r"((ulong)trap), [addr] "r"(mepc)
		: "memory");

	/* A fault on the instruction read reports as a load fault; rewrite
	 * it to the corresponding fetch-class cause for the caller. */
	switch (trap->cause) {
	case CAUSE_LOAD_ACCESS:
		trap->cause = CAUSE_FETCH_ACCESS;
		trap->tval = mepc;
		break;
	case CAUSE_LOAD_PAGE_FAULT:
		trap->cause = CAUSE_FETCH_PAGE_FAULT;
		trap->tval = mepc;
		break;
	case CAUSE_LOAD_GUEST_PAGE_FAULT:
		trap->cause = CAUSE_FETCH_GUEST_PAGE_FAULT;
		trap->tval = mepc;
		break;
	default:
		break;
	}

	return insn;
}
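
/*
 * Illustrative usage sketch (hypothetical, not part of the original file):
 * sbi_get_insn() is meant for trap handlers that need to decode the
 * trapped instruction, e.g. to emulate a misaligned access. When the
 * fetch itself faults, trap->cause has already been rewritten above to a
 * fetch-class cause, so it can be forwarded as-is.
 */
static inline ulong example_fetch_trapped_insn(ulong mepc,
					       struct sbi_trap_info *trap)
{
	ulong insn = sbi_get_insn(mepc, trap);

	if (trap->cause)
		return 0; /* caller forwards the rewritten fetch fault */

	/* ... decode insn and emulate the trapped operation here ... */
	return insn;
}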