sbi_misaligned_ldst.c

/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#include <sbi/riscv_asm.h>
#include <sbi/riscv_encoding.h>
#include <sbi/riscv_fp.h>
#include <sbi/sbi_error.h>
#include <sbi/sbi_misaligned_ldst.h>
#include <sbi/sbi_pmu.h>
#include <sbi/sbi_trap.h>
#include <sbi/sbi_unpriv.h>
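
/*
 * Scratch buffer used by both handlers to assemble the memory operand
 * one byte at a time; the union lets the same bytes be read back as a
 * native-width integer (data_ulong) or as a 64-bit value (data_u64).
 */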
union reg_data {
	u8 data_bytes[8];
	ulong data_ulong;
	u64 data_u64;
};
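
/*
 * Adjust the trap instruction (mtinst) value before a nested fault is
 * redirected: pseudo-instruction values for vector loads/stores are
 * passed through, a zero original value stays zero, and otherwise the
 * byte offset of the failing access is encoded in the address-offset
 * (rs1) field of the transformed instruction.
 */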
static ulong sbi_misaligned_tinst_fixup(ulong orig_tinst, ulong new_tinst,
					ulong addr_offset)
{
	if (new_tinst == INSN_PSEUDO_VS_LOAD ||
	    new_tinst == INSN_PSEUDO_VS_STORE)
		return new_tinst;
	else if (orig_tinst == 0)
		return 0UL;
	else
		return orig_tinst | (addr_offset << SH_RS1);
}
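
/*
 * Emulate a misaligned load: decode the trapped instruction, read the
 * operand from memory one byte at a time, and write the sign- or
 * zero-extended result to the destination register. Unsupported
 * instructions and nested faults are redirected back to the supervisor.
 */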
int sbi_misaligned_load_handler(ulong addr, ulong tval2, ulong tinst,
				struct sbi_trap_regs *regs)
{
	ulong insn, insn_len;
	union reg_data val;
	struct sbi_trap_info uptrap;
	int i, fp = 0, shift = 0, len = 0;

	sbi_pmu_ctr_incr_fw(SBI_PMU_FW_MISALIGNED_LOAD);

	if (tinst & 0x1) {
		/*
		 * Bit[0] == 1 implies trapped instruction value is
		 * transformed instruction or custom instruction.
		 */
		insn = tinst | INSN_16BIT_MASK;
		insn_len = (tinst & 0x2) ? INSN_LEN(insn) : 2;
	} else {
		/*
		 * Bit[0] == 0 implies trapped instruction value is
		 * zero or special value.
		 */
		insn = sbi_get_insn(regs->mepc, &uptrap);
		if (uptrap.cause) {
			uptrap.epc = regs->mepc;
			return sbi_trap_redirect(regs, &uptrap);
		}
		insn_len = INSN_LEN(insn);
	}
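
	/*
	 * Decode the access width (len), the sign-extension shift and
	 * whether the destination is an FP register. For compressed
	 * loads the destination register number is moved into the
	 * standard rd position so the SET_*RD() macros below work on
	 * RVC encodings as well.
	 */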
	if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
		len = 4;
		shift = 8 * (sizeof(ulong) - len);
#if __riscv_xlen == 64
	} else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
		len = 8;
		shift = 8 * (sizeof(ulong) - len);
	} else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) {
		len = 4;
#endif
#ifdef __riscv_flen
	} else if ((insn & INSN_MASK_FLD) == INSN_MATCH_FLD) {
		fp = 1;
		len = 8;
	} else if ((insn & INSN_MASK_FLW) == INSN_MATCH_FLW) {
		fp = 1;
		len = 4;
#endif
	} else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) {
		len = 2;
		shift = 8 * (sizeof(ulong) - len);
	} else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) {
		len = 2;
#if __riscv_xlen >= 64
	} else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) {
		len = 8;
		shift = 8 * (sizeof(ulong) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		shift = 8 * (sizeof(ulong) - len);
#endif
	} else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) {
		len = 4;
		shift = 8 * (sizeof(ulong) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		shift = 8 * (sizeof(ulong) - len);
#ifdef __riscv_flen
	} else if ((insn & INSN_MASK_C_FLD) == INSN_MATCH_C_FLD) {
		fp = 1;
		len = 8;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLDSP) == INSN_MATCH_C_FLDSP) {
		fp = 1;
		len = 8;
#if __riscv_xlen == 32
	} else if ((insn & INSN_MASK_C_FLW) == INSN_MATCH_C_FLW) {
		fp = 1;
		len = 4;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLWSP) == INSN_MATCH_C_FLWSP) {
		fp = 1;
		len = 4;
#endif
#endif
	} else {
		uptrap.epc = regs->mepc;
		uptrap.cause = CAUSE_MISALIGNED_LOAD;
		uptrap.tval = addr;
		uptrap.tval2 = tval2;
		uptrap.tinst = tinst;
		uptrap.gva = sbi_regs_gva(regs);
		return sbi_trap_redirect(regs, &uptrap);
	}
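
	/*
	 * Read the operand one byte at a time so every access is
	 * naturally aligned; sbi_load_u8() performs the access on
	 * behalf of the trapped lower-privilege context and reports
	 * any nested fault through uptrap.
	 */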
	val.data_u64 = 0;
	for (i = 0; i < len; i++) {
		val.data_bytes[i] = sbi_load_u8((void *)(addr + i),
						&uptrap);
		if (uptrap.cause) {
			uptrap.epc = regs->mepc;
			uptrap.tinst = sbi_misaligned_tinst_fixup(
				tinst, uptrap.tinst, i);
			return sbi_trap_redirect(regs, &uptrap);
		}
	}
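
	/*
	 * Write the result back: integer destinations get the value
	 * sign- or zero-extended as the instruction requires (shift
	 * left, then arithmetic shift right), FP destinations take the
	 * raw 32-bit or 64-bit pattern. Finally skip past the emulated
	 * instruction.
	 */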
	if (!fp)
		SET_RD(insn, regs, ((long)(val.data_ulong << shift)) >> shift);
#ifdef __riscv_flen
	else if (len == 8)
		SET_F64_RD(insn, regs, val.data_u64);
	else
		SET_F32_RD(insn, regs, val.data_ulong);
#endif

	regs->mepc += insn_len;

	return 0;
}
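
/*
 * Emulate a misaligned store: decode the trapped instruction, fetch the
 * source register value, and write it to memory one byte at a time.
 * Unsupported instructions and nested faults are redirected back to the
 * supervisor.
 */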
int sbi_misaligned_store_handler(ulong addr, ulong tval2, ulong tinst,
				 struct sbi_trap_regs *regs)
{
	ulong insn, insn_len;
	union reg_data val;
	struct sbi_trap_info uptrap;
	int i, len = 0;

	sbi_pmu_ctr_incr_fw(SBI_PMU_FW_MISALIGNED_STORE);

	if (tinst & 0x1) {
		/*
		 * Bit[0] == 1 implies trapped instruction value is
		 * transformed instruction or custom instruction.
		 */
		insn = tinst | INSN_16BIT_MASK;
		insn_len = (tinst & 0x2) ? INSN_LEN(insn) : 2;
	} else {
		/*
		 * Bit[0] == 0 implies trapped instruction value is
		 * zero or special value.
		 */
		insn = sbi_get_insn(regs->mepc, &uptrap);
		if (uptrap.cause) {
			uptrap.epc = regs->mepc;
			return sbi_trap_redirect(regs, &uptrap);
		}
		insn_len = INSN_LEN(insn);
	}
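
	/*
	 * Assume an integer rs2 source first; the decode below
	 * determines the store width and overrides the value for FP
	 * and compressed encodings.
	 */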
	val.data_ulong = GET_RS2(insn, regs);

	if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
		len = 4;
#if __riscv_xlen == 64
	} else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {
		len = 8;
#endif
#ifdef __riscv_flen
	} else if ((insn & INSN_MASK_FSD) == INSN_MATCH_FSD) {
		len = 8;
		val.data_u64 = GET_F64_RS2(insn, regs);
	} else if ((insn & INSN_MASK_FSW) == INSN_MATCH_FSW) {
		len = 4;
		val.data_ulong = GET_F32_RS2(insn, regs);
#endif
	} else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) {
		len = 2;
#if __riscv_xlen >= 64
	} else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) {
		len = 8;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		val.data_ulong = GET_RS2C(insn, regs);
#endif
	} else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) {
		len = 4;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		val.data_ulong = GET_RS2C(insn, regs);
#ifdef __riscv_flen
	} else if ((insn & INSN_MASK_C_FSD) == INSN_MATCH_C_FSD) {
		len = 8;
		val.data_u64 = GET_F64_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_FSDSP) == INSN_MATCH_C_FSDSP) {
		len = 8;
		val.data_u64 = GET_F64_RS2C(insn, regs);
#if __riscv_xlen == 32
	} else if ((insn & INSN_MASK_C_FSW) == INSN_MATCH_C_FSW) {
		len = 4;
		val.data_ulong = GET_F32_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_FSWSP) == INSN_MATCH_C_FSWSP) {
		len = 4;
		val.data_ulong = GET_F32_RS2C(insn, regs);
#endif
#endif
	} else {
		uptrap.epc = regs->mepc;
		uptrap.cause = CAUSE_MISALIGNED_STORE;
		uptrap.tval = addr;
		uptrap.tval2 = tval2;
		uptrap.tinst = tinst;
		uptrap.gva = sbi_regs_gva(regs);
		return sbi_trap_redirect(regs, &uptrap);
	}
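
	/*
	 * Write the operand one byte at a time so every access is
	 * naturally aligned; on a nested fault the byte offset is
	 * folded into the trap instruction before redirecting. On
	 * success, skip past the emulated instruction.
	 */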
	for (i = 0; i < len; i++) {
		sbi_store_u8((void *)(addr + i), val.data_bytes[i],
			     &uptrap);
		if (uptrap.cause) {
			uptrap.epc = regs->mepc;
			uptrap.tinst = sbi_misaligned_tinst_fixup(
				tinst, uptrap.tinst, i);
			return sbi_trap_redirect(regs, &uptrap);
		}
	}

	regs->mepc += insn_len;

	return 0;
}