atomic.h

/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_ATOMIC_H
#define __ASM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/lse.h>

#ifdef __KERNEL__

#define __ARM64_IN_ATOMIC_IMPL
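
/*
 * Pick the arm64 atomic backend: the LSE (Large System Extensions)
 * implementation when both the kernel config and the assembler support
 * it, otherwise the classic load-exclusive/store-exclusive (LL/SC) one.
 */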
#if defined(CONFIG_ARM64_LSE_ATOMICS) && defined(CONFIG_AS_LSE)
#include <asm/atomic_lse.h>
#else
#include <asm/atomic_ll_sc.h>
#endif

#undef __ARM64_IN_ATOMIC_IMPL

#include <asm/cmpxchg.h>
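
/*
 * ___atomic_add_unless() - add @a to @v, so long as @v was not already @u,
 * using a cmpxchg() retry loop.  Evaluates to the value of @v observed
 * before any successful add.  @sfx selects between the atomic_t ("") and
 * atomic64_t ("64") flavours.
 */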
#define ___atomic_add_unless(v, a, u, sfx)                              \
({                                                                      \
        typeof((v)->counter) c, old;                                    \
                                                                        \
        c = atomic##sfx##_read(v);                                      \
        while (c != (u) &&                                              \
               (old = atomic##sfx##_cmpxchg((v), c, c + (a))) != c)     \
                c = old;                                                \
        c;                                                              \
})
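
/*
 * Illustrative expansion (sketch only, not part of this header):
 * __atomic_add_unless(v, a, u) behaves roughly like
 *
 *	int c = atomic_read(v);
 *	while (c != u) {
 *		int old = atomic_cmpxchg(v, c, c + a);
 *		if (old == c)
 *			break;
 *		c = old;
 *	}
 *	// result: the value seen in @v before any successful add
 */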

#define ATOMIC_INIT(i) { (i) }

#define atomic_read(v) READ_ONCE((v)->counter)
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
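
/*
 * The suffixes below follow the usual kernel convention: _relaxed imposes
 * no memory ordering, _acquire orders against later accesses, _release
 * orders against earlier accesses, and the unsuffixed forms are fully
 * ordered.
 */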

#define atomic_add_return_relaxed atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return_acquire
#define atomic_add_return_release atomic_add_return_release
#define atomic_add_return atomic_add_return

#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return_acquire
#define atomic_sub_return_release atomic_sub_return_release
#define atomic_sub_return atomic_sub_return

#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
#define atomic_dec_return(v) atomic_sub_return(1, (v))

#define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
#define atomic_xchg_acquire(v, new) xchg_acquire(&((v)->counter), (new))
#define atomic_xchg_release(v, new) xchg_release(&((v)->counter), (new))
#define atomic_xchg(v, new) xchg(&((v)->counter), (new))

#define atomic_cmpxchg_relaxed(v, old, new) \
	cmpxchg_relaxed(&((v)->counter), (old), (new))
#define atomic_cmpxchg_acquire(v, old, new) \
	cmpxchg_acquire(&((v)->counter), (old), (new))
#define atomic_cmpxchg_release(v, old, new) \
	cmpxchg_release(&((v)->counter), (old), (new))
#define atomic_cmpxchg(v, old, new) cmpxchg(&((v)->counter), (old), (new))
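
/*
 * Note (descriptive only): atomic_cmpxchg() returns the value that was in
 * @v->counter before the operation, so the swap took place iff the return
 * value equals @old, e.g.
 *
 *	if (atomic_cmpxchg(&v, old, new) == old)
 *		...	// we installed @new
 */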

#define atomic_inc(v) atomic_add(1, (v))
#define atomic_dec(v) atomic_sub(1, (v))
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
#define atomic_add_negative(i, v) (atomic_add_return((i), (v)) < 0)
#define __atomic_add_unless(v, a, u) ___atomic_add_unless(v, a, u,)
#define atomic_andnot atomic_andnot
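
/*
 * atomic_andnot() itself is provided by the backend header included above;
 * defining the name to itself is the usual way of advertising to the
 * generic atomic code that this architecture supplies its own version.
 */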

/*
 * 64-bit atomic operations.
 */
#define ATOMIC64_INIT ATOMIC_INIT
#define atomic64_read atomic_read
#define atomic64_set atomic_set

#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return_acquire
#define atomic64_add_return_release atomic64_add_return_release
#define atomic64_add_return atomic64_add_return

#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#define atomic64_sub_return_release atomic64_sub_return_release
#define atomic64_sub_return atomic64_sub_return

#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))

#define atomic64_xchg_relaxed atomic_xchg_relaxed
#define atomic64_xchg_acquire atomic_xchg_acquire
#define atomic64_xchg_release atomic_xchg_release
#define atomic64_xchg atomic_xchg

#define atomic64_cmpxchg_relaxed atomic_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic_cmpxchg_acquire
#define atomic64_cmpxchg_release atomic_cmpxchg_release
#define atomic64_cmpxchg atomic_cmpxchg

#define atomic64_inc(v) atomic64_add(1, (v))
#define atomic64_dec(v) atomic64_sub(1, (v))
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return(v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
#define atomic64_add_negative(i, v) (atomic64_add_return((i), (v)) < 0)
#define atomic64_add_unless(v, a, u) (___atomic_add_unless(v, a, u, 64) != u)
#define atomic64_andnot atomic64_andnot

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
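
/*
 * Illustrative use (sketch only): atomic64_inc_not_zero() is the classic
 * "take a reference only if the object is still live" idiom, e.g.
 *
 *	if (!atomic64_inc_not_zero(&obj->refcnt))
 *		return NULL;	// already on its way to being freed
 *
 * where @obj and @refcnt are hypothetical names used for illustration.
 */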

#endif	/* __KERNEL__ */
#endif	/* __ASM_ATOMIC_H */