local64.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_LOCAL64_H
#define _ASM_GENERIC_LOCAL64_H

#include <linux/percpu.h>
#include <asm/types.h>

/*
 * A signed long type for operations which are atomic for a single CPU.
 * Usually used in combination with per-cpu variables.
 *
 * This is the default implementation, which uses atomic64_t.  Which is
 * rather pointless.  The whole point behind local64_t is that some processors
 * can perform atomic adds and subtracts in a manner which is atomic wrt IRQs
 * running on this CPU.  local64_t allows exploitation of such capabilities.
 */
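
/*
 * Illustrative usage sketch, not part of the original header; the names
 * my_counters, my_event_tick and my_read_counter are hypothetical.  This
 * is the per-cpu counter pattern local64_t is meant for:
 *
 *	static DEFINE_PER_CPU(local64_t, my_counters);
 *
 *	static void my_event_tick(void)
 *	{
 *		local64_inc(this_cpu_ptr(&my_counters));
 *	}
 *
 *	static s64 my_read_counter(int cpu)
 *	{
 *		return local64_read(per_cpu_ptr(&my_counters, cpu));
 *	}
 *
 * The increment is atomic wrt IRQs on the local CPU, so no lock is needed
 * even if an interrupt handler bumps the same counter.
 */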
/* Implement in terms of atomics. */

#if BITS_PER_LONG == 64

#include <asm/local.h>

typedef struct {
	local_t a;
} local64_t;

#define LOCAL64_INIT(i)	{ LOCAL_INIT(i) }

#define local64_read(l)		local_read(&(l)->a)
#define local64_set(l,i)	local_set((&(l)->a), (i))
#define local64_inc(l)		local_inc(&(l)->a)
#define local64_dec(l)		local_dec(&(l)->a)
#define local64_add(i,l)	local_add((i), (&(l)->a))
#define local64_sub(i,l)	local_sub((i), (&(l)->a))

#define local64_sub_and_test(i, l)	local_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l)		local_dec_and_test(&(l)->a)
#define local64_inc_and_test(l)		local_inc_and_test(&(l)->a)
#define local64_add_negative(i, l)	local_add_negative((i), (&(l)->a))
#define local64_add_return(i, l)	local_add_return((i), (&(l)->a))
#define local64_sub_return(i, l)	local_sub_return((i), (&(l)->a))
#define local64_inc_return(l)		local_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n)	local_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)		local_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u)	local_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l)		local_inc_not_zero(&(l)->a)
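
/*
 * Sketch of a read-modify-write built from local64_cmpxchg (illustrative,
 * not from the original header; `ctr' and `cap' are hypothetical): retry
 * until no other local context raced between the read and the cmpxchg.
 * local64_cmpxchg() returns the previous value, so success means it
 * returned the `old' we computed from.
 *
 *	s64 old, new;
 *	do {
 *		old = local64_read(&ctr);
 *		new = old < cap ? old + 1 : old;
 *	} while (local64_cmpxchg(&ctr, old, new) != old);
 */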
/* Non-atomic variants, ie. preemption disabled and won't be touched
 * in interrupt, etc.  Some archs can optimize this case well. */
#define __local64_inc(l)	local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)	local64_set((l), local64_read(l) - 1)
#define __local64_add(i,l)	local64_set((l), local64_read(l) + (i))
#define __local64_sub(i,l)	local64_set((l), local64_read(l) - (i))
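
/*
 * Sketch of when the __local64_* forms above are safe (illustrative;
 * `my_counters' and `nbytes' are hypothetical): no IRQ/NMI writer may
 * touch the same counter, and preemption must be off so the task cannot
 * migrate between the read and the set.
 *
 *	preempt_disable();
 *	__local64_add(nbytes, this_cpu_ptr(&my_counters));
 *	preempt_enable();
 */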
#else /* BITS_PER_LONG != 64 */

#include <linux/atomic.h>

/* Don't use typedef: don't want them to be mixed with atomic_t's. */
typedef struct {
	atomic64_t a;
} local64_t;
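
/*
 * The struct wrapper (instead of `typedef atomic64_t local64_t') makes
 * mixing the two types a build error rather than a silent bug.
 * Illustrative, hypothetical snippet:
 *
 *	atomic64_t x;
 *	local64_read(&x);	(fails to compile: atomic64_t has no member `a')
 */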
#define LOCAL64_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local64_read(l)		atomic64_read(&(l)->a)
#define local64_set(l,i)	atomic64_set((&(l)->a), (i))
#define local64_inc(l)		atomic64_inc(&(l)->a)
#define local64_dec(l)		atomic64_dec(&(l)->a)
#define local64_add(i,l)	atomic64_add((i), (&(l)->a))
#define local64_sub(i,l)	atomic64_sub((i), (&(l)->a))

#define local64_sub_and_test(i, l)	atomic64_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l)		atomic64_dec_and_test(&(l)->a)
#define local64_inc_and_test(l)		atomic64_inc_and_test(&(l)->a)
#define local64_add_negative(i, l)	atomic64_add_negative((i), (&(l)->a))
#define local64_add_return(i, l)	atomic64_add_return((i), (&(l)->a))
#define local64_sub_return(i, l)	atomic64_sub_return((i), (&(l)->a))
#define local64_inc_return(l)		atomic64_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n)	atomic64_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)		atomic64_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u)	atomic64_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l)		atomic64_inc_not_zero(&(l)->a)

/* Non-atomic variants, ie. preemption disabled and won't be touched
 * in interrupt, etc.  Some archs can optimize this case well. */
#define __local64_inc(l)	local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)	local64_set((l), local64_read(l) - 1)
#define __local64_add(i,l)	local64_set((l), local64_read(l) + (i))
#define __local64_sub(i,l)	local64_set((l), local64_read(l) - (i))

#endif /* BITS_PER_LONG != 64 */
#endif /* _ASM_GENERIC_LOCAL64_H */