/* SPDX-License-Identifier: GPL-2.0 */
/* atomic_64.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return a value
	 * (the new and the old value, respectively) and
	 * do the barriers.
	 */
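
	/* Each routine below is a compare-and-swap retry loop: load
	 * the current value, apply the operation into a scratch
	 * register, and try to install the result with cas/casx.  If
	 * another CPU modified the word in between, the value cas
	 * returns differs from the one we loaded and the loop retries.
	 *
	 * BACKOFF_SETUP, BACKOFF_LABEL and BACKOFF_SPIN come from
	 * asm/backoff.h: on SMP builds a failed cas branches to the
	 * spin label, which delays for an exponentially growing count
	 * before retrying, to ease cacheline contention; on UP builds
	 * BACKOFF_LABEL branches straight back to the retry label.
	 */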

#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);							\
EXPORT_SYMBOL(atomic_##op);
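
	/* Expansion sketch, assuming ATOMIC_OP(add): the "op" line
	 * becomes "add %g1, %o0, %g7", giving atomic_add(), which
	 * updates the word and returns nothing.  Per sparc convention,
	 * instructions indented by one extra space sit in the delay
	 * slot of the preceding branch or retl.
	 */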

#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);						\
EXPORT_SYMBOL(atomic_##op##_return);
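
	/* The op in the bne delay slot recomputes the new value into
	 * %g1; it executes on both paths, but on the retry path %g1 is
	 * reloaded at 1: so the clobber is harmless.  "sra %g1, 0, %o0"
	 * sign-extends the 32-bit result into the 64-bit return
	 * register, so atomic_##op##_return hands back the new value.
	 */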

#define ATOMIC_FETCH_OP(op)						\
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_fetch_##op);						\
EXPORT_SYMBOL(atomic_fetch_##op);
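
	/* The fetch variant returns what the word held before the
	 * update: %g1 still has the originally loaded value, again
	 * sign-extended to 64 bits for the return.
	 */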

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
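
	/* Note that and/or/xor get no _return variant: the kernel's
	 * atomic API exposes value-returning bitwise ops only in their
	 * fetch_ (return-the-old-value) form.
	 */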

#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);							\
EXPORT_SYMBOL(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);					\
EXPORT_SYMBOL(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_fetch_##op);						\
EXPORT_SYMBOL(atomic64_fetch_##op);
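
	/* The 64-bit variants differ only in width: ldx/casx and %xcc
	 * replace lduw/cas/%icc, and no sra sign-extension is needed
	 * since the result already fills the register.  The _return
	 * form recomputes the new value in the retl delay slot, and
	 * the fetch form simply moves the old value into %o0.
	 */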

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
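
	/* atomic64_dec_if_positive: decrement *%o0 only when the old
	 * value is positive.  If %g1 is already <= 0 the brlez skips
	 * the casx, but the sub in its delay slot still executes, so
	 * the routine returns old - 1 in either case; a negative
	 * return tells the caller the decrement did not happen.
	 */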
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
EXPORT_SYMBOL(atomic64_dec_if_positive)