atomic.h

#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i) { (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
#define ATOMIC_OP(op, asm_op) \
static __inline__ void atomic_##op(int a, atomic_t *v) \
{ \
	int t; \
\
	__asm__ __volatile__( \
"1: lwarx %0,0,%3 # atomic_" #op "\n" \
	#asm_op " %0,%2,%0\n" \
	PPC405_ERR77(0,%3) \
" stwcx. %0,0,%3 \n" \
" bne- 1b\n" \
	: "=&r" (t), "+m" (v->counter) \
	: "r" (a), "r" (&v->counter) \
	: "cc"); \
} \

#define ATOMIC_OP_RETURN(op, asm_op) \
static __inline__ int atomic_##op##_return(int a, atomic_t *v) \
{ \
	int t; \
\
	__asm__ __volatile__( \
	PPC_ATOMIC_ENTRY_BARRIER \
"1: lwarx %0,0,%2 # atomic_" #op "_return\n" \
	#asm_op " %0,%1,%0\n" \
	PPC405_ERR77(0,%2) \
" stwcx. %0,0,%2 \n" \
" bne- 1b\n" \
	PPC_ATOMIC_EXIT_BARRIER \
	: "=&r" (t) \
	: "r" (a), "r" (&v->counter) \
	: "cc", "memory"); \
\
	return t; \
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1: lwarx %0,0,%2 # atomic_inc\n\
	addic %0,%0,1\n"
	PPC405_ERR77(0,%2)
" stwcx. %0,0,%2 \n\
	bne- 1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # atomic_inc_return\n\
	addic %0,%0,1\n"
	PPC405_ERR77(0,%1)
" stwcx. %0,0,%1 \n\
	bne- 1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
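
/*
 * Illustrative example (hypothetical caller, names assumed): a counter
 * initialised to -N and incremented by each of N contributors lets the
 * contributor that brings it to zero take the "last one out" action:
 *
 *	atomic_t outstanding = ATOMIC_INIT(-3);	// expecting 3 completions
 *	...
 *	if (atomic_inc_and_test(&outstanding))
 *		finish_work();			// only the 3rd caller sees true
 */
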
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1: lwarx %0,0,%2 # atomic_dec\n\
	addic %0,%0,-1\n"
	PPC405_ERR77(0,%2)\
" stwcx. %0,0,%2\n\
	bne- 1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # atomic_dec_return\n\
	addic %0,%0,-1\n"
	PPC405_ERR77(0,%1)
" stwcx. %0,0,%1\n\
	bne- 1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # __atomic_add_unless\n\
	cmpw 0,%0,%3 \n\
	beq- 2f \n\
	add %0,%2,%0 \n"
	PPC405_ERR77(0,%2)
" stwcx. %0,0,%1 \n\
	bne- 1b \n"
	PPC_ATOMIC_EXIT_BARRIER
" subf %0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
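
/*
 * Illustrative example (hypothetical caller, field name assumed): because
 * the return value is always the value observed before any addition, a
 * caller that only wants to add while the counter is still live can compare
 * against the excluded value:
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;	// old value was 0, nothing was added
 *	// old value was non-zero, the increment happened
 */
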

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%2 # atomic_inc_not_zero\n\
	cmpwi 0,%0,0\n\
	beq- 2f\n\
	addic %1,%0,1\n"
	PPC405_ERR77(0,%2)
" stwcx. %1,0,%2\n\
	bne- 1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
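
/*
 * Illustrative example (hypothetical caller, field name assumed): a common
 * pattern is taking a reference on an object reached through a lookup path,
 * where a zero count means the object is already being torn down:
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;	// too late, refcount had already hit zero
 *	// safe: we now hold a reference
 */
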
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
	cmpwi %0,1\n\
	addi %0,%0,-1\n\
	blt- 2f\n"
	PPC405_ERR77(0,%1)
" stwcx. %0,0,%1\n\
	bne- 1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:" : "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
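
/*
 * Illustrative example (hypothetical counter, name assumed): since the
 * return value is "old value minus 1" whether or not the store happened,
 * callers test the sign of the result rather than comparing with the old
 * value, e.g. when draining a budget counter:
 *
 *	if (atomic_dec_if_positive(&budget) < 0)
 *		return -EBUSY;	// budget was already 0 (or negative)
 *	// one unit of budget was consumed
 */
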

#ifdef __powerpc64__

#define ATOMIC64_INIT(i) { (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC64_OP(op, asm_op) \
static __inline__ void atomic64_##op(long a, atomic64_t *v) \
{ \
	long t; \
\
	__asm__ __volatile__( \
"1: ldarx %0,0,%3 # atomic64_" #op "\n" \
	#asm_op " %0,%2,%0\n" \
" stdcx. %0,0,%3 \n" \
" bne- 1b\n" \
	: "=&r" (t), "+m" (v->counter) \
	: "r" (a), "r" (&v->counter) \
	: "cc"); \
}

#define ATOMIC64_OP_RETURN(op, asm_op) \
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v) \
{ \
	long t; \
\
	__asm__ __volatile__( \
	PPC_ATOMIC_ENTRY_BARRIER \
"1: ldarx %0,0,%2 # atomic64_" #op "_return\n" \
	#asm_op " %0,%1,%0\n" \
" stdcx. %0,0,%2 \n" \
" bne- 1b\n" \
	PPC_ATOMIC_EXIT_BARRIER \
	: "=&r" (t) \
	: "r" (a), "r" (&v->counter) \
	: "cc", "memory"); \
\
	return t; \
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)

ATOMIC64_OP(and, and)
ATOMIC64_OP(or, or)
ATOMIC64_OP(xor, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1: ldarx %0,0,%2 # atomic64_inc\n\
	addic %0,%0,1\n\
	stdcx. %0,0,%2 \n\
	bne- 1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_inc_return\n\
	addic %0,%0,1\n\
	stdcx. %0,0,%1 \n\
	bne- 1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1: ldarx %0,0,%2 # atomic64_dec\n\
	addic %0,%0,-1\n\
	stdcx. %0,0,%2\n\
	bne- 1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_dec_return\n\
	addic %0,%0,-1\n\
	stdcx. %0,0,%1\n\
	bne- 1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
	addic. %0,%0,-1\n\
	blt- 2f\n\
	stdcx. %0,0,%1\n\
	bne- 1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:" : "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add happened, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_add_unless\n\
	cmpd 0,%0,%3 \n\
	beq- 2f \n\
	add %0,%2,%0 \n"
" stdcx. %0,0,%1 \n\
	bne- 1b \n"
	PPC_ATOMIC_EXIT_BARRIER
" subf %0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
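
/*
 * Illustrative note (hypothetical counter, name assumed): unlike
 * __atomic_add_unless() above, which returns the old value, this helper
 * returns whether the add actually happened, so it is used directly as a
 * boolean:
 *
 *	if (!atomic64_add_unless(&ctr, 1, 0))
 *		return NULL;	// ctr was 0, nothing was added
 */
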

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%2 # atomic64_inc_not_zero\n\
	cmpdi 0,%0,0\n\
	beq- 2f\n\
	addic %1,%0,1\n\
	stdcx. %1,0,%2\n\
	bne- 1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1 != 0;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */