atomic.h

/* SPDX-License-Identifier: GPL-2.0+ */
#ifndef _ASM_GENERIC_ATOMIC_H
#define _ASM_GENERIC_ATOMIC_H

typedef struct { volatile int counter; } atomic_t;
#if BITS_PER_LONG == 32
typedef struct { volatile long long counter; } atomic64_t;
#else /* BITS_PER_LONG != 32 */
typedef struct { volatile long counter; } atomic64_t;
#endif

#define ATOMIC_INIT(i)          { (i) }

#define atomic_read(v)          ((v)->counter)
#define atomic_set(v, i)        ((v)->counter = (i))
#define atomic64_read(v)        atomic_read(v)
#define atomic64_set(v, i)      atomic_set(v, i)
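/*
 * The helpers below make their read-modify-write sequences atomic by
 * masking interrupts around a plain C update: atomicity is assumed to
 * come from local_irq_save()/local_irq_restore(), which this header does
 * not define itself (they are expected to be provided elsewhere by the
 * architecture, e.g. its system/interrupt header). This is only safe on
 * a single CPU; it provides no SMP ordering or cross-CPU atomicity.
 */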
static inline void atomic_add(int i, atomic_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter += i;
        local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter -= i;
        local_irq_restore(flags);
}

static inline void atomic_inc(atomic_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        ++v->counter;
        local_irq_restore(flags);
}

static inline void atomic_dec(atomic_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        --v->counter;
        local_irq_restore(flags);
}
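/*
 * atomic_dec_and_test() decrements *v and returns true (non-zero) only
 * if the new value is zero; atomic_add_negative() adds i to *v and
 * returns true if the result is negative. Both sample and update the
 * counter with interrupts masked, like the helpers above.
 */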
static inline int atomic_dec_and_test(volatile atomic_t *v)
{
        unsigned long flags = 0;
        int val;

        local_irq_save(flags);
        val = v->counter;
        v->counter = val -= 1;
        local_irq_restore(flags);

        return val == 0;
}

static inline int atomic_add_negative(int i, volatile atomic_t *v)
{
        unsigned long flags = 0;
        int val;

        local_irq_save(flags);
        val = v->counter;
        v->counter = val += i;
        local_irq_restore(flags);

        return val < 0;
}
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        *addr &= ~mask;
        local_irq_restore(flags);
}
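/*
 * 64-bit variants: on 32-bit configurations the counter is a long long,
 * on 64-bit configurations a plain long (see the atomic64_t definition
 * above), so the add/sub argument type follows BITS_PER_LONG.
 */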
#if BITS_PER_LONG == 32
static inline void atomic64_add(long long i, volatile atomic64_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter += i;
        local_irq_restore(flags);
}

static inline void atomic64_sub(long long i, volatile atomic64_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter -= i;
        local_irq_restore(flags);
}
#else /* BITS_PER_LONG != 32 */
static inline void atomic64_add(long i, volatile atomic64_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter += i;
        local_irq_restore(flags);
}

static inline void atomic64_sub(long i, volatile atomic64_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter -= i;
        local_irq_restore(flags);
}
#endif
static inline void atomic64_inc(volatile atomic64_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter += 1;
        local_irq_restore(flags);
}

static inline void atomic64_dec(volatile atomic64_t *v)
{
        unsigned long flags = 0;

        local_irq_save(flags);
        v->counter -= 1;
        local_irq_restore(flags);
}

#endif /* _ASM_GENERIC_ATOMIC_H */
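
For illustration, a minimal usage sketch of the interface this header declares. This is hypothetical caller code, not part of the header: the include path, the refcnt variable, and the get_ref()/put_ref() helpers are made up for the example, and it assumes BITS_PER_LONG and the IRQ helpers are already provided by the build.

/* Hypothetical caller code -- not part of atomic.h itself. */
#include <asm-generic/atomic.h>

static atomic_t refcnt = ATOMIC_INIT(1);    /* statically initialized to 1 */

static void get_ref(void)
{
        atomic_inc(&refcnt);                /* take an additional reference */
}

static int put_ref(void)
{
        /* true only when this drop releases the last reference */
        return atomic_dec_and_test(&refcnt);
}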