sbi_misaligned_ldst.c

/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#include <sbi/riscv_asm.h>
#include <sbi/riscv_encoding.h>
#include <sbi/riscv_unpriv.h>
#include <sbi/riscv_fp.h>
#include <sbi/sbi_error.h>
#include <sbi/sbi_misaligned_ldst.h>
#include <sbi/sbi_trap.h>
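
/*
 * Scratch value assembled one byte at a time; the union lets the same
 * bytes be read back as a native-width integer or as a raw 64-bit
 * floating-point bit pattern.
 */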
union reg_data {
	u8 data_bytes[8];
	ulong data_ulong;
	u64 data_u64;
};
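
/*
 * Emulate a misaligned load: decode the faulting instruction, fetch the
 * data one byte at a time, write the result to the destination register,
 * and resume execution past the instruction.
 */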
int sbi_misaligned_load_handler(u32 hartid, ulong mcause,
				struct sbi_trap_regs *regs,
				struct sbi_scratch *scratch)
{
	union reg_data val;
	ulong insn = get_insn(regs->mepc, NULL);
	ulong addr = csr_read(CSR_MTVAL);
	int i, fp = 0, shift = 0, len = 0;
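
	/*
	 * Decode the instruction against the supported load encodings to
	 * find the access width (len), the shift used to sign-extend the
	 * result, and whether the destination is an FP register (fp).
	 */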
	if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
		len = 4;
		shift = 8 * (sizeof(ulong) - len);
#if __riscv_xlen == 64
	} else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
		len = 8;
		shift = 8 * (sizeof(ulong) - len);
	} else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) {
		len = 4;
#endif
	} else if ((insn & INSN_MASK_FLD) == INSN_MATCH_FLD) {
		fp = 1;
		len = 8;
	} else if ((insn & INSN_MASK_FLW) == INSN_MATCH_FLW) {
		fp = 1;
		len = 4;
	} else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) {
		len = 2;
		shift = 8 * (sizeof(ulong) - len);
	} else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) {
		len = 2;
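	/*
	 * Compressed encodings keep the register number in a different
	 * field, so insn is rewritten as "RVC_RS2S(insn) << SH_RD" where
	 * needed; the SET_RD()/SET_F*_RD() macros below then work
	 * unchanged. Stack-pointer-relative forms (C.LWSP/C.LDSP) are
	 * only valid for rd != 0.
	 */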
#ifdef __riscv_compressed
# if __riscv_xlen >= 64
	} else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) {
		len = 8;
		shift = 8 * (sizeof(ulong) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		shift = 8 * (sizeof(ulong) - len);
# endif
	} else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) {
		len = 4;
		shift = 8 * (sizeof(ulong) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		shift = 8 * (sizeof(ulong) - len);
	} else if ((insn & INSN_MASK_C_FLD) == INSN_MATCH_C_FLD) {
		fp = 1;
		len = 8;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLDSP) == INSN_MATCH_C_FLDSP) {
		fp = 1;
		len = 8;
# if __riscv_xlen == 32
	} else if ((insn & INSN_MASK_C_FLW) == INSN_MATCH_C_FLW) {
		fp = 1;
		len = 4;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLWSP) == INSN_MATCH_C_FLWSP) {
		fp = 1;
		len = 4;
# endif
#endif
	} else
		return SBI_EILL;
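
	/*
	 * Read the target bytes individually; single-byte accesses can
	 * never be misaligned, so this cannot re-trap on alignment.
	 */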
	val.data_u64 = 0;
	for (i = 0; i < len; i++)
		val.data_bytes[i] = load_u8((void *)(addr + i));

	if (!fp)
		SET_RD(insn, regs, val.data_ulong << shift >> shift);
	else if (len == 8)
		SET_F64_RD(insn, regs, val.data_u64);
	else
		SET_F32_RD(insn, regs, val.data_ulong);
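
	/* Skip the emulated instruction: 2 bytes if compressed, else 4. */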
	regs->mepc += INSN_LEN(insn);

	return 0;
}
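
/*
 * Emulate a misaligned store: decode the faulting instruction, read the
 * source register, and write the data out one byte at a time.
 */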
int sbi_misaligned_store_handler(u32 hartid, ulong mcause,
				 struct sbi_trap_regs *regs,
				 struct sbi_scratch *scratch)
{
	union reg_data val;
	ulong insn = get_insn(regs->mepc, NULL);
	ulong addr = csr_read(CSR_MTVAL);
	int i, len = 0;

	val.data_ulong = GET_RS2(insn, regs);
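
	/*
	 * Decode against the supported store encodings; the default
	 * integer rs2 value read above is overridden for FP and
	 * compressed variants by the matching GET_* macro.
	 */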
	if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
		len = 4;
#if __riscv_xlen == 64
	} else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {
		len = 8;
#endif
	} else if ((insn & INSN_MASK_FSD) == INSN_MATCH_FSD) {
		len = 8;
		val.data_u64 = GET_F64_RS2(insn, regs);
	} else if ((insn & INSN_MASK_FSW) == INSN_MATCH_FSW) {
		len = 4;
		val.data_ulong = GET_F32_RS2(insn, regs);
	} else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) {
		len = 2;
#ifdef __riscv_compressed
# if __riscv_xlen >= 64
	} else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) {
		len = 8;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		val.data_ulong = GET_RS2C(insn, regs);
# endif
	} else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) {
		len = 4;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		val.data_ulong = GET_RS2C(insn, regs);
	} else if ((insn & INSN_MASK_C_FSD) == INSN_MATCH_C_FSD) {
		len = 8;
		val.data_u64 = GET_F64_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_FSDSP) == INSN_MATCH_C_FSDSP) {
		len = 8;
		val.data_u64 = GET_F64_RS2C(insn, regs);
# if __riscv_xlen == 32
	} else if ((insn & INSN_MASK_C_FSW) == INSN_MATCH_C_FSW) {
		len = 4;
		val.data_ulong = GET_F32_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_FSWSP) == INSN_MATCH_C_FSWSP) {
		len = 4;
		val.data_ulong = GET_F32_RS2C(insn, regs);
# endif
#endif
	} else
		return SBI_EILL;
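
	/* Write the bytes individually so no access is itself misaligned. */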
	for (i = 0; i < len; i++)
		store_u8((void *)(addr + i), val.data_bytes[i]);

	regs->mepc += INSN_LEN(insn);

	return 0;
}