atomic.h
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *     David Mosberger-Tang <davidm@hpl.hp.com>
 */

#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))
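
/*
 * ATOMIC_OP(op, c_op) generates ia64_atomic_<op>(): it computes
 * "counter c_op i" in a cmpxchg.acq retry loop and returns the new
 * value once the compare-and-exchange succeeds.
 */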
#define ATOMIC_OP(op, c_op) \
static __inline__ int \
ia64_atomic_##op (int i, atomic_t *v) \
{ \
        __s32 old, new; \
        CMPXCHG_BUGCHECK_DECL \
 \
        do { \
                CMPXCHG_BUGCHECK(v); \
                old = atomic_read(v); \
                new = old c_op i; \
        } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
        return new; \
}

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
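
/*
 * atomic_add_return()/atomic_sub_return() return the new value.  When
 * the increment is a compile-time constant that the ia64 fetchadd
 * instruction can encode (+/-1, 4, 8, 16), ia64_fetch_and_add() is used
 * directly; any other value falls back to the cmpxchg loop above.
 */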
#define atomic_add_return(i,v) \
({ \
        int __ia64_aar_i = (i); \
        (__builtin_constant_p(i) \
         && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4) \
             || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16) \
             || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4) \
             || (__ia64_aar_i == -8) || (__ia64_aar_i == -16))) \
                ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
                : ia64_atomic_add(__ia64_aar_i, v); \
})

#define atomic_sub_return(i,v) \
({ \
        int __ia64_asr_i = (i); \
        (__builtin_constant_p(i) \
         && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4) \
             || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16) \
             || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4) \
             || (__ia64_asr_i == -8) || (__ia64_asr_i == -16))) \
                ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
                : ia64_atomic_sub(__ia64_asr_i, v); \
})
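
/*
 * Bitwise ops have no fetchadd fast path, so they always go through the
 * ATOMIC_OP() cmpxchg loop; only the value-discarding wrappers below are
 * exposed.
 */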
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_xor(i,v)

#undef ATOMIC_OP
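
/* 64-bit counterparts of the helpers above, operating on atomic64_t. */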
#define ATOMIC64_OP(op, c_op) \
static __inline__ long \
ia64_atomic64_##op (__s64 i, atomic64_t *v) \
{ \
        __s64 old, new; \
        CMPXCHG_BUGCHECK_DECL \
 \
        do { \
                CMPXCHG_BUGCHECK(v); \
                old = atomic64_read(v); \
                new = old c_op i; \
        } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
        return new; \
}

ATOMIC64_OP(add, +)
ATOMIC64_OP(sub, -)
#define atomic64_add_return(i,v) \
({ \
        long __ia64_aar_i = (i); \
        (__builtin_constant_p(i) \
         && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4) \
             || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16) \
             || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4) \
             || (__ia64_aar_i == -8) || (__ia64_aar_i == -16))) \
                ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
                : ia64_atomic64_add(__ia64_aar_i, v); \
})

#define atomic64_sub_return(i,v) \
({ \
        long __ia64_asr_i = (i); \
        (__builtin_constant_p(i) \
         && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4) \
             || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16) \
             || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4) \
             || (__ia64_asr_i == -8) || (__ia64_asr_i == -16))) \
                ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
                : ia64_atomic64_sub(__ia64_asr_i, v); \
})

ATOMIC64_OP(and, &)
ATOMIC64_OP(or, |)
ATOMIC64_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_xor(i,v)

#undef ATOMIC64_OP
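
/*
 * Exchange and compare-and-exchange on the embedded counter, expressed
 * through the generic cmpxchg()/xchg() primitives.
 */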
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
        (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
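
/*
 * __atomic_add_unless() adds @a to @v unless @v equals @u, and returns
 * the value of @v observed before the (attempted) addition.
 */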
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
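
/*
 * atomic64_add_unless() is the 64-bit variant; note that it returns
 * whether the addition was performed (old value != @u) rather than the
 * old value itself.
 */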
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
        return atomic64_add_return(i, v) < 0;
}
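
/* Convenience wrappers built on the *_return() primitives above. */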
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v) (void)atomic_add_return((i), (v))
#define atomic_sub(i,v) (void)atomic_sub_return((i), (v))
#define atomic_inc(v) atomic_add(1, (v))
#define atomic_dec(v) atomic_sub(1, (v))

#define atomic64_add(i,v) (void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v) (void)atomic64_sub_return((i), (v))
#define atomic64_inc(v) atomic64_add(1, (v))
#define atomic64_dec(v) atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */