qrwlock.h

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Queue read/write lock
 *
 * (C) Copyright 2013-2014 Hewlett-Packard Development Company, L.P.
 *
 * Authors: Waiman Long <waiman.long@hp.com>
 */
#ifndef __ASM_GENERIC_QRWLOCK_H
#define __ASM_GENERIC_QRWLOCK_H

#include <linux/atomic.h>
#include <asm/barrier.h>
#include <asm/processor.h>

#include <asm-generic/qrwlock_types.h>

/*
 * Writer states & reader shift and bias.
 */
#define _QW_WAITING     0x100           /* A writer is waiting       */
#define _QW_LOCKED      0x0ff           /* A writer holds the lock   */
#define _QW_WMASK       0x1ff           /* Writer mask               */
#define _QR_SHIFT       9               /* Reader count shift        */
#define _QR_BIAS        (1U << _QR_SHIFT)
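
/*
 * Layout of the 32-bit ->cnts word implied by the definitions above:
 *
 *   bits  0-7 : writer byte (_QW_LOCKED while a writer holds the lock)
 *   bit     8 : _QW_WAITING, set by a writer queued in the slow path
 *   bits 9-31 : number of active readers, counted in units of _QR_BIAS
 */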

/*
 * External function declarations
 */
extern void queued_read_lock_slowpath(struct qrwlock *lock);
extern void queued_write_lock_slowpath(struct qrwlock *lock);
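
/*
 * The slow paths live in kernel/locking/qrwlock.c; contenders that lose
 * the fast path are serialized there on the lock's internal wait_lock,
 * so that late-arriving readers do not starve a waiting writer.
 */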

/**
 * queued_read_trylock - try to acquire read lock of a queue rwlock
 * @lock : Pointer to queue rwlock structure
 * Return: 1 if lock acquired, 0 if failed
 */
static inline int queued_read_trylock(struct qrwlock *lock)
{
        u32 cnts;

        cnts = atomic_read(&lock->cnts);
        if (likely(!(cnts & _QW_WMASK))) {
                cnts = (u32)atomic_add_return_acquire(_QR_BIAS, &lock->cnts);
                if (likely(!(cnts & _QW_WMASK)))
                        return 1;
                atomic_sub(_QR_BIAS, &lock->cnts);
        }
        return 0;
}
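
/*
 * Note: the trylock above is optimistic. It adds _QR_BIAS with acquire
 * ordering and, if a writer appeared between the initial read and the
 * increment, backs the bias out again, so a failed attempt leaves the
 * net value of ->cnts unchanged.
 */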

/**
 * queued_write_trylock - try to acquire write lock of a queue rwlock
 * @lock : Pointer to queue rwlock structure
 * Return: 1 if lock acquired, 0 if failed
 */
static inline int queued_write_trylock(struct qrwlock *lock)
{
        u32 cnts;

        cnts = atomic_read(&lock->cnts);
        if (unlikely(cnts))
                return 0;

        return likely(atomic_try_cmpxchg_acquire(&lock->cnts, &cnts,
                                _QW_LOCKED));
}
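
/*
 * Note: a write trylock only succeeds when ->cnts is 0, i.e. there is no
 * reader bias, no writer byte and no _QW_WAITING bit; the cmpxchg then
 * installs _QW_LOCKED in one shot with acquire semantics.
 */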

/**
 * queued_read_lock - acquire read lock of a queue rwlock
 * @lock: Pointer to queue rwlock structure
 */
static inline void queued_read_lock(struct qrwlock *lock)
{
        u32 cnts;

        cnts = atomic_add_return_acquire(_QR_BIAS, &lock->cnts);
        if (likely(!(cnts & _QW_WMASK)))
                return;

        /* The slowpath will decrement the reader count, if necessary. */
        queued_read_lock_slowpath(lock);
}
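
/*
 * Note: the fast path above adds the reader bias unconditionally; only
 * if the writer field turns out to be non-zero does it fall back to
 * queued_read_lock_slowpath(), which decides whether to keep or drop
 * the bias it just added.
 */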

/**
 * queued_write_lock - acquire write lock of a queue rwlock
 * @lock : Pointer to queue rwlock structure
 */
static inline void queued_write_lock(struct qrwlock *lock)
{
        u32 cnts = 0;

        /* Optimize for the unfair lock case where the fair flag is 0. */
        if (likely(atomic_try_cmpxchg_acquire(&lock->cnts, &cnts, _QW_LOCKED)))
                return;

        queued_write_lock_slowpath(lock);
}
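
/*
 * Note: the write fast path is a single cmpxchg from 0 to _QW_LOCKED;
 * any contention at all (readers, a writer, or a waiting writer) sends
 * the caller into queued_write_lock_slowpath().
 */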

/**
 * queued_read_unlock - release read lock of a queue rwlock
 * @lock : Pointer to queue rwlock structure
 */
static inline void queued_read_unlock(struct qrwlock *lock)
{
        /*
         * Atomically decrement the reader count
         */
        (void)atomic_sub_return_release(_QR_BIAS, &lock->cnts);
}
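
/*
 * Note: the release ordering on the decrement ensures everything done
 * inside the read-side critical section is visible before the reader
 * count drops, pairing with the acquire a subsequent writer uses when
 * it takes the lock.
 */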

/**
 * queued_write_unlock - release write lock of a queue rwlock
 * @lock : Pointer to queue rwlock structure
 */
static inline void queued_write_unlock(struct qrwlock *lock)
{
        smp_store_release(&lock->wlocked, 0);
}
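
/*
 * Note: ->wlocked overlays the least-significant byte of ->cnts (see
 * asm-generic/qrwlock_types.h), so this single byte store clears only
 * the writer byte while preserving a pending writer's _QW_WAITING bit
 * and any reader bias already published in the slow path.
 */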

/*
 * Remapping rwlock architecture specific functions to the corresponding
 * queue rwlock functions.
 */
#define arch_read_lock(l)       queued_read_lock(l)
#define arch_write_lock(l)      queued_write_lock(l)
#define arch_read_trylock(l)    queued_read_trylock(l)
#define arch_write_trylock(l)   queued_write_trylock(l)
#define arch_read_unlock(l)     queued_read_unlock(l)
#define arch_write_unlock(l)    queued_write_unlock(l)

#endif /* __ASM_GENERIC_QRWLOCK_H */
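
For reference, the sketch below re-expresses the fast paths of this header as a
small stand-alone user-space illustration built on C11 <stdatomic.h> instead of
the kernel's atomic_t API. It is not the kernel implementation: the names
(toy_qrwlock, toy_read_trylock, and so on) are made up for illustration, only
the trylock and unlock paths are mirrored, and the kernel's queued slow paths,
which serialize contenders on ->wait_lock, are omitted entirely.

#include <stdatomic.h>
#include <stdint.h>

#define TOY_QW_LOCKED  0x0ffu           /* writer byte            */
#define TOY_QW_WMASK   0x1ffu           /* writer byte + wait bit */
#define TOY_QR_BIAS    (1u << 9)        /* one reader             */

struct toy_qrwlock {
        _Atomic uint32_t cnts;          /* same layout as qrwlock->cnts */
};

/* Mirrors queued_read_trylock(): optimistic add, roll back on failure. */
static int toy_read_trylock(struct toy_qrwlock *l)
{
        uint32_t c = atomic_load_explicit(&l->cnts, memory_order_relaxed);

        if (c & TOY_QW_WMASK)
                return 0;
        c = atomic_fetch_add_explicit(&l->cnts, TOY_QR_BIAS,
                                      memory_order_acquire) + TOY_QR_BIAS;
        if (!(c & TOY_QW_WMASK))
                return 1;
        atomic_fetch_sub_explicit(&l->cnts, TOY_QR_BIAS, memory_order_relaxed);
        return 0;
}

/* Mirrors queued_write_trylock(): 0 -> writer byte in one cmpxchg. */
static int toy_write_trylock(struct toy_qrwlock *l)
{
        uint32_t expected = 0;

        return atomic_compare_exchange_strong_explicit(&l->cnts, &expected,
                        TOY_QW_LOCKED, memory_order_acquire,
                        memory_order_relaxed);
}

/* Mirrors queued_read_unlock(): drop one reader with release ordering. */
static void toy_read_unlock(struct toy_qrwlock *l)
{
        atomic_fetch_sub_explicit(&l->cnts, TOY_QR_BIAS, memory_order_release);
}

/* Mirrors queued_write_unlock(): clear only the writer byte. */
static void toy_write_unlock(struct toy_qrwlock *l)
{
        atomic_fetch_and_explicit(&l->cnts, ~(uint32_t)TOY_QW_LOCKED,
                                  memory_order_release);
}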