futex.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 2018 Jim Wilson (jimw@sifive.com)
 */

#ifndef _ASM_RISCV_FUTEX_H
#define _ASM_RISCV_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>
#include <asm/asm.h>

/* We don't even really need the extable code, but for now keep it simple */
#ifndef CONFIG_MMU
#define __enable_user_access()		do { } while (0)
#define __disable_user_access()		do { } while (0)
#endif
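
/*
 * Run a single futex operation as one atomic memory operation (AMO) on the
 * user word at @uaddr, leaving the previous value in @oldval.  If the user
 * access faults, the __ex_table entry redirects to the fixup code, which
 * sets @ret to -EFAULT and resumes after the AMO.
 */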
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	uintptr_t tmp;						\
	__enable_user_access();					\
	__asm__ __volatile__ (					\
	"1:	" insn "				\n"	\
	"2:						\n"	\
	"	.section .fixup,\"ax\"			\n"	\
	"	.balign 4				\n"	\
	"3:	li %[r],%[e]				\n"	\
	"	jump 2b,%[t]				\n"	\
	"	.previous				\n"	\
	"	.section __ex_table,\"a\"		\n"	\
	"	.balign " RISCV_SZPTR "			\n"	\
	"	" RISCV_PTR " 1b, 3b			\n"	\
	"	.previous				\n"	\
	: [r] "+r" (ret), [ov] "=&r" (oldval),			\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)			\
	: [op] "Jr" (oparg), [e] "i" (-EFAULT)			\
	: "memory");						\
	__disable_user_access();				\
}
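
/*
 * Map the FUTEX_OP_* encodings onto the matching RISC-V AMO: swap, add,
 * or, and-not (by complementing oparg), and xor, all with acquire/release
 * ordering.  Unknown ops return -ENOSYS; the previous value is reported
 * through *oval only on success.
 */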
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("amoswap.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("amoadd.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("amoor.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("amoand.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("amoxor.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
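
/*
 * Compare-and-exchange on the user futex word using an LR/SC retry loop
 * with acquire/release ordering.  Returns 0 with the loaded value in
 * *uval, or -EFAULT if either the load-reserved or store-conditional
 * access faults.
 */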
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;
	uintptr_t tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__enable_user_access();
	__asm__ __volatile__ (
	"1:	lr.w.aqrl %[v],%[u]			\n"
	"	bne %[v],%z[ov],3f			\n"
	"2:	sc.w.aqrl %[t],%z[nv],%[u]		\n"
	"	bnez %[t],1b				\n"
	"3:						\n"
	"	.section .fixup,\"ax\"			\n"
	"	.balign 4				\n"
	"4:	li %[r],%[e]				\n"
	"	jump 3b,%[t]				\n"
	"	.previous				\n"
	"	.section __ex_table,\"a\"		\n"
	"	.balign " RISCV_SZPTR "			\n"
	"	" RISCV_PTR " 1b, 4b			\n"
	"	" RISCV_PTR " 2b, 4b			\n"
	"	.previous				\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr), [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "i" (-EFAULT)
	: "memory");
	__disable_user_access();

	*uval = val;
	return ret;
}

#endif /* _ASM_RISCV_FUTEX_H */