/* atomic.h */
  1. #ifndef _ASM_M32R_ATOMIC_H
  2. #define _ASM_M32R_ATOMIC_H
  3. /*
  4. * linux/include/asm-m32r/atomic.h
  5. *
  6. * M32R version:
  7. * Copyright (C) 2001, 2002 Hitoshi Yamamoto
  8. * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
  9. */
  10. #include <linux/types.h>
  11. #include <asm/assembler.h>
  12. #include <asm/cmpxchg.h>
  13. #include <asm/dcache_clear.h>
  14. #include <asm/barrier.h>
  15. /*
  16. * Atomic operations that C can't guarantee us. Useful for
  17. * resource counting etc..
  18. */
  19. #define ATOMIC_INIT(i) { (i) }
  20. /**
  21. * atomic_read - read atomic variable
  22. * @v: pointer of type atomic_t
  23. *
  24. * Atomically reads the value of @v.
  25. */
  26. #define atomic_read(v) READ_ONCE((v)->counter)
  27. /**
  28. * atomic_set - set atomic variable
  29. * @v: pointer of type atomic_t
  30. * @i: required value
  31. *
  32. * Atomically sets the value of @v to @i.
  33. */
  34. #define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
  35. #ifdef CONFIG_CHIP_M32700_TS1
  36. #define __ATOMIC_CLOBBER , "r4"
  37. #else
  38. #define __ATOMIC_CLOBBER
  39. #endif
  40. #define ATOMIC_OP(op) \
  41. static __inline__ void atomic_##op(int i, atomic_t *v) \
  42. { \
  43. unsigned long flags; \
  44. int result; \
  45. \
  46. local_irq_save(flags); \
  47. __asm__ __volatile__ ( \
  48. "# atomic_" #op " \n\t" \
  49. DCACHE_CLEAR("%0", "r4", "%1") \
  50. M32R_LOCK" %0, @%1; \n\t" \
  51. #op " %0, %2; \n\t" \
  52. M32R_UNLOCK" %0, @%1; \n\t" \
  53. : "=&r" (result) \
  54. : "r" (&v->counter), "r" (i) \
  55. : "memory" \
  56. __ATOMIC_CLOBBER \
  57. ); \
  58. local_irq_restore(flags); \
  59. } \
  60. #define ATOMIC_OP_RETURN(op) \
  61. static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
  62. { \
  63. unsigned long flags; \
  64. int result; \
  65. \
  66. local_irq_save(flags); \
  67. __asm__ __volatile__ ( \
  68. "# atomic_" #op "_return \n\t" \
  69. DCACHE_CLEAR("%0", "r4", "%1") \
  70. M32R_LOCK" %0, @%1; \n\t" \
  71. #op " %0, %2; \n\t" \
  72. M32R_UNLOCK" %0, @%1; \n\t" \
  73. : "=&r" (result) \
  74. : "r" (&v->counter), "r" (i) \
  75. : "memory" \
  76. __ATOMIC_CLOBBER \
  77. ); \
  78. local_irq_restore(flags); \
  79. \
  80. return result; \
  81. }
  82. #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
  83. ATOMIC_OPS(add)
  84. ATOMIC_OPS(sub)
  85. ATOMIC_OP(and)
  86. ATOMIC_OP(or)
  87. ATOMIC_OP(xor)
  88. #undef ATOMIC_OPS
  89. #undef ATOMIC_OP_RETURN
  90. #undef ATOMIC_OP
  91. /**
  92. * atomic_sub_and_test - subtract value from variable and test result
  93. * @i: integer value to subtract
  94. * @v: pointer of type atomic_t
  95. *
  96. * Atomically subtracts @i from @v and returns
  97. * true if the result is zero, or false for all
  98. * other cases.
  99. */
  100. #define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
  101. /**
  102. * atomic_inc_return - increment atomic variable and return it
  103. * @v: pointer of type atomic_t
  104. *
  105. * Atomically increments @v by 1 and returns the result.
  106. */
  107. static __inline__ int atomic_inc_return(atomic_t *v)
  108. {
  109. unsigned long flags;
  110. int result;
  111. local_irq_save(flags);
  112. __asm__ __volatile__ (
  113. "# atomic_inc_return \n\t"
  114. DCACHE_CLEAR("%0", "r4", "%1")
  115. M32R_LOCK" %0, @%1; \n\t"
  116. "addi %0, #1; \n\t"
  117. M32R_UNLOCK" %0, @%1; \n\t"
  118. : "=&r" (result)
  119. : "r" (&v->counter)
  120. : "memory"
  121. __ATOMIC_CLOBBER
  122. );
  123. local_irq_restore(flags);
  124. return result;
  125. }
  126. /**
  127. * atomic_dec_return - decrement atomic variable and return it
  128. * @v: pointer of type atomic_t
  129. *
  130. * Atomically decrements @v by 1 and returns the result.
  131. */
  132. static __inline__ int atomic_dec_return(atomic_t *v)
  133. {
  134. unsigned long flags;
  135. int result;
  136. local_irq_save(flags);
  137. __asm__ __volatile__ (
  138. "# atomic_dec_return \n\t"
  139. DCACHE_CLEAR("%0", "r4", "%1")
  140. M32R_LOCK" %0, @%1; \n\t"
  141. "addi %0, #-1; \n\t"
  142. M32R_UNLOCK" %0, @%1; \n\t"
  143. : "=&r" (result)
  144. : "r" (&v->counter)
  145. : "memory"
  146. __ATOMIC_CLOBBER
  147. );
  148. local_irq_restore(flags);
  149. return result;
  150. }
  151. /**
  152. * atomic_inc - increment atomic variable
  153. * @v: pointer of type atomic_t
  154. *
  155. * Atomically increments @v by 1.
  156. */
  157. #define atomic_inc(v) ((void)atomic_inc_return(v))
  158. /**
  159. * atomic_dec - decrement atomic variable
  160. * @v: pointer of type atomic_t
  161. *
  162. * Atomically decrements @v by 1.
  163. */
  164. #define atomic_dec(v) ((void)atomic_dec_return(v))
  165. /**
  166. * atomic_inc_and_test - increment and test
  167. * @v: pointer of type atomic_t
  168. *
  169. * Atomically increments @v by 1
  170. * and returns true if the result is zero, or false for all
  171. * other cases.
  172. */
  173. #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
  174. /**
  175. * atomic_dec_and_test - decrement and test
  176. * @v: pointer of type atomic_t
  177. *
  178. * Atomically decrements @v by 1 and
  179. * returns true if the result is 0, or false for all
  180. * other cases.
  181. */
  182. #define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
  183. /**
  184. * atomic_add_negative - add and test if negative
  185. * @v: pointer of type atomic_t
  186. * @i: integer value to add
  187. *
  188. * Atomically adds @i to @v and returns true
  189. * if the result is negative, or false when
  190. * result is greater than or equal to zero.
  191. */
  192. #define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
  193. #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
  194. #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  195. /**
  196. * __atomic_add_unless - add unless the number is a given value
  197. * @v: pointer of type atomic_t
  198. * @a: the amount to add to v...
  199. * @u: ...unless v is equal to u.
  200. *
  201. * Atomically adds @a to @v, so long as it was not @u.
  202. * Returns the old value of @v.
  203. */
  204. static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  205. {
  206. int c, old;
  207. c = atomic_read(v);
  208. for (;;) {
  209. if (unlikely(c == (u)))
  210. break;
  211. old = atomic_cmpxchg((v), c, c + (a));
  212. if (likely(old == c))
  213. break;
  214. c = old;
  215. }
  216. return c;
  217. }
  218. #endif /* _ASM_M32R_ATOMIC_H */